Commit be130b7b authored by Michael Kozono's avatar Michael Kozono

Merge branch 'master' into...

Merge branch 'master' into '217477-remove-feature-flags-to-make-registry-table-ssot-for-job-artifacts'

# Conflicts:
#   ee/spec/workers/geo/secondary/registry_consistency_worker_spec.rb
parents 3b4eb406 389ff4da
......@@ -2,7 +2,7 @@ import { slugify } from '~/lib/utils/text_utility';
import createGqClient, { fetchPolicies } from '~/lib/graphql';
import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { parseTemplatingVariables } from './variable_mapping';
import { mergeURLVariables, parseTemplatingVariables } from './variable_mapping';
import { DATETIME_RANGE_TYPES } from '~/lib/utils/constants';
import { timeRangeToParams, getRangeType } from '~/lib/utils/datetime_range';
import { isSafeURL, mergeUrlParams } from '~/lib/utils/url_utility';
......@@ -289,7 +289,7 @@ export const mapToDashboardViewModel = ({
}) => {
return {
dashboard,
variables: parseTemplatingVariables(templating),
variables: mergeURLVariables(parseTemplatingVariables(templating)),
links: links.map(mapLinksToViewModel),
panelGroups: panel_groups.map(mapToPanelGroupViewModel),
};
......
import { isString } from 'lodash';
import { templatingVariablesFromUrl } from '../utils';
import { VARIABLE_TYPES } from '../constants';
/**
......@@ -164,4 +165,39 @@ export const parseTemplatingVariables = ({ variables = {} } = {}) =>
return acc;
}, {});
/**
* Custom variables are defined in the dashboard yml file
* and their values can be passed through the URL.
*
* On component load, this method merges variables data
* from the yml file with URL data to store in the Vuex store.
* Not all params coming from the URL need to be stored. Only
* the ones that have a corresponding variable defined in the
* yml file.
*
* This ensures that there is always a single source of truth
* for variables
*
* This method can be improved further. See the below issue
* https://gitlab.com/gitlab-org/gitlab/-/issues/217713
*
* @param {Object} varsFromYML template variables from yml file
* @returns {Object}
*/
/**
 * Merges dashboard-YML-defined variables with values supplied via the URL.
 *
 * Only variables declared in the YML file are kept — URL params without a
 * matching YML variable are dropped, so the YML stays the single source of
 * truth. When a URL value exists for a YML variable, it overrides the
 * variable's default `value`.
 *
 * @param {Object} varsFromYML template variables from the yml file
 * @returns {Object} merged variables, keyed by variable name
 */
export const mergeURLVariables = (varsFromYML = {}) => {
  const urlVariables = templatingVariablesFromUrl();

  return Object.keys(varsFromYML).reduce((merged, name) => {
    const ymlVariable = varsFromYML[name];
    const hasUrlValue = Object.prototype.hasOwnProperty.call(urlVariables, name);

    merged[name] = hasUrlValue ? { ...ymlVariable, value: urlVariables[name] } : ymlVariable;

    return merged;
  }, {});
};
export default {};
......@@ -170,11 +170,10 @@ export const convertVariablesForURL = variables =>
* begin with a constant prefix so that it doesn't collide with
* other URL params.
*
* @param {String} New URL
* @param {String} search URL query string to parse (defaults to the current location's search)
* @returns {Object} The custom variables defined by the user in the URL
*/
export const getPromCustomVariablesFromUrl = (search = window.location.search) => {
export const templatingVariablesFromUrl = (search = window.location.search) => {
const params = queryToObject(search);
// pick the params with variable prefix
const paramsWithVars = pickBy(params, (val, key) => key.startsWith(VARIABLE_PREFIX));
......@@ -353,39 +352,4 @@ export const barChartsDataParser = (data = []) =>
{},
);
/**
* Custom variables are defined in the dashboard yml file
* and their values can be passed through the URL.
*
* On component load, this method merges variables data
* from the yml file with URL data to store in the Vuex store.
* Not all params coming from the URL need to be stored. Only
* the ones that have a corresponding variable defined in the
* yml file.
*
* This ensures that there is always a single source of truth
* for variables
*
* This method can be improved further. See the below issue
* https://gitlab.com/gitlab-org/gitlab/-/issues/217713
*
* @param {Object} varsFromYML template variables from yml file
* @returns {Object}
*/
export const mergeURLVariables = (varsFromYML = {}) => {
  // Variables parsed from the current URL's query string (prefixed params).
  const varsFromURL = getPromCustomVariablesFromUrl();
  const variables = {};
  // Iterate only over YML-defined variables: URL params with no matching
  // YML variable are intentionally ignored, keeping the YML file the
  // single source of truth for which variables exist.
  Object.keys(varsFromYML).forEach(key => {
    if (Object.prototype.hasOwnProperty.call(varsFromURL, key)) {
      // URL-provided value overrides the YML default for this variable.
      variables[key] = {
        ...varsFromYML[key],
        value: varsFromURL[key],
      };
    } else {
      variables[key] = varsFromYML[key];
    }
  });
  return variables;
};
export default {};
......@@ -25,6 +25,11 @@ export default {
containerClasses: ['dag-graph-container', 'gl-display-flex', 'gl-flex-direction-column'].join(
' ',
),
hoverFadeClasses: [
'gl-cursor-pointer',
'gl-transition-duration-slow',
'gl-transition-timing-function-ease',
].join(' '),
},
gitLabColorRotation: [
'#e17223',
......@@ -230,7 +235,10 @@ export default {
.attr('id', d => {
return this.createAndAssignId(d, 'uid', LINK_SELECTOR);
})
.classed(`${LINK_SELECTOR} gl-cursor-pointer`, true);
.classed(
`${LINK_SELECTOR} gl-transition-property-stroke-opacity ${this.$options.viewOptions.hoverFadeClasses}`,
true,
);
},
generateNodes(svg, nodeData) {
......@@ -242,7 +250,10 @@ export default {
.data(nodeData)
.enter()
.append('line')
.classed(`${NODE_SELECTOR} gl-cursor-pointer`, true)
.classed(
`${NODE_SELECTOR} gl-transition-property-stroke ${this.$options.viewOptions.hoverFadeClasses}`,
true,
)
.attr('id', d => {
return this.createAndAssignId(d, 'uid', NODE_SELECTOR);
})
......
<script>
import { __ } from '~/locale';
import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import flash from '~/flash';
import Poll from '~/lib/utils/poll';
export default {
  // Legacy MR widget section: polls the Terraform report endpoint and
  // summarizes a single plan's resource additions/changes/deletions.
  name: 'MRWidgetTerraformPlan',
  components: {
    GlIcon,
    GlLink,
    GlLoadingIcon,
    GlSprintf,
  },
  props: {
    // API endpoint that returns the Terraform plan report(s) for this MR.
    endpoint: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      loading: true,
      // Plans payload as returned by the endpoint, keyed by plan id.
      // NOTE(review): key/shape inferred from usage below — confirm with API.
      plans: {},
    };
  },
  computed: {
    addNum() {
      return Number(this.plan.create);
    },
    changeNum() {
      return Number(this.plan.update);
    },
    deleteNum() {
      return Number(this.plan.delete);
    },
    logUrl() {
      return this.plan.job_path;
    },
    // Only the first plan in the payload is displayed by this component.
    plan() {
      const firstPlanKey = Object.keys(this.plans)[0];
      return this.plans[firstPlanKey] ?? {};
    },
    // A NaN in any count makes the sum NaN (comparison false), so this also
    // guards against missing or non-numeric plan fields.
    validPlanValues() {
      return this.addNum + this.changeNum + this.deleteNum >= 0;
    },
  },
  created() {
    this.fetchPlans();
  },
  methods: {
    // Polls the endpoint until a non-empty plan arrives, then stops.
    fetchPlans() {
      this.loading = true;
      const poll = new Poll({
        resource: {
          fetchPlans: () => axios.get(this.endpoint),
        },
        data: this.endpoint,
        method: 'fetchPlans',
        successCallback: ({ data }) => {
          this.plans = data;
          // Keep polling while the first plan is still empty.
          if (Object.keys(this.plan).length) {
            this.loading = false;
            poll.stop();
          }
        },
        errorCallback: () => {
          this.plans = {};
          this.loading = false;
          flash(__('An error occurred while loading terraform report'));
        },
      });
      poll.makeRequest();
    },
  },
};
</script>
<template>
<section class="mr-widget-section">
<div class="mr-widget-body media d-flex flex-row">
<span class="append-right-default align-self-start align-self-lg-center">
<gl-icon name="status_warning" :size="24" />
</span>
<div class="d-flex flex-fill flex-column flex-md-row">
<div class="terraform-mr-plan-text normal d-flex flex-column flex-lg-row">
<p class="m-0 pr-1">{{ __('A terraform report was generated in your pipelines.') }}</p>
<gl-loading-icon v-if="loading" size="md" />
<p v-else-if="validPlanValues" class="m-0">
<gl-sprintf
:message="
__(
'Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete',
)
"
>
<template #addNum>
<strong>{{ addNum }}</strong>
</template>
<template #changeNum>
<strong>{{ changeNum }}</strong>
</template>
<template #deleteNum>
<strong>{{ deleteNum }}</strong>
</template>
</gl-sprintf>
</p>
<p v-else class="m-0">{{ __('Changes are unknown') }}</p>
</div>
<div class="terraform-mr-plan-actions">
<gl-link
v-if="logUrl"
:href="logUrl"
target="_blank"
data-track-event="click_terraform_mr_plan_button"
data-track-label="mr_widget_terraform_mr_plan_button"
data-track-property="terraform_mr_plan_button"
class="btn btn-sm js-terraform-report-link"
rel="noopener"
>
{{ __('View full log') }}
<gl-icon name="external-link" />
</gl-link>
</div>
</div>
</div>
</section>
</template>
<script>
import { GlSkeletonLoading } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import TerraformPlan from './terraform_plan.vue';
export default {
  // MR widget wrapper: polls the Terraform reports endpoint and renders
  // one TerraformPlan row per plan returned.
  name: 'MRWidgetTerraformContainer',
  components: {
    GlSkeletonLoading,
    TerraformPlan,
  },
  props: {
    // API endpoint that returns all Terraform plan reports for this MR.
    endpoint: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      loading: true,
      plans: {},
      // Poll instance, kept on the component so it can be stopped on teardown.
      poll: null,
    };
  },
  created() {
    this.fetchPlans();
  },
  beforeDestroy() {
    // Stop polling when the widget is removed from the DOM.
    this.poll.stop();
  },
  methods: {
    // Polls until at least one plan is returned (or an error occurs).
    fetchPlans() {
      this.loading = true;
      this.poll = new Poll({
        resource: {
          fetchPlans: () => axios.get(this.endpoint),
        },
        data: this.endpoint,
        method: 'fetchPlans',
        successCallback: ({ data }) => {
          this.plans = data;
          if (Object.keys(this.plans).length) {
            this.loading = false;
            this.poll.stop();
          }
        },
        errorCallback: () => {
          // Sentinel entry so the template renders a single error-state row.
          this.plans = { bad_plan: {} };
          this.loading = false;
          this.poll.stop();
        },
      });
      this.poll.makeRequest();
    },
  },
};
</script>
<template>
<section class="mr-widget-section">
<div v-if="loading" class="mr-widget-body media">
<gl-skeleton-loading />
</div>
<terraform-plan
v-for="(plan, key) in plans"
v-else
:key="key"
:plan="plan"
class="mr-widget-body media"
/>
</section>
</template>
<script>
import { __ } from '~/locale';
import { GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
export default {
  // Renders a single Terraform plan summary row inside the MR widget.
  name: 'TerraformPlan',
  components: {
    GlIcon,
    GlLink,
    GlSprintf,
  },
  props: {
    // One plan object; fields read below: create, update, delete,
    // job_name, job_path.
    // NOTE(review): shape inferred from usage — confirm against the API.
    plan: {
      required: true,
      type: Object,
    },
  },
  computed: {
    addNum() {
      return Number(this.plan.create);
    },
    changeNum() {
      return Number(this.plan.update);
    },
    deleteNum() {
      return Number(this.plan.delete);
    },
    reportChangeText() {
      if (this.validPlanValues) {
        return __(
          'Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete',
        );
      }
      return __('Generating the report caused an error.');
    },
    reportHeaderText() {
      if (this.plan.job_name) {
        return __('The Terraform report %{name} was generated in your pipelines.');
      }
      return __('A Terraform report was generated in your pipelines.');
    },
    // NaN counts (missing/non-numeric fields) make the sum NaN, which fails
    // this check and triggers the error message above.
    validPlanValues() {
      return this.addNum + this.changeNum + this.deleteNum >= 0;
    },
  },
};
</script>
<template>
<div class="gl-display-flex">
<span
class="gl-display-flex gl-align-items-center gl-justify-content-center append-right-default gl-align-self-start gl-mt-1"
>
<gl-icon name="status_warning" :size="24" />
</span>
<div class="gl-display-flex gl-flex-fill-1 gl-flex-direction-column flex-md-row">
<div class="terraform-mr-plan-text normal gl-display-flex gl-flex-direction-column">
<p class="gl-m-0 gl-pr-1">
<gl-sprintf :message="reportHeaderText">
<template #name>
<strong>{{ plan.job_name }}</strong>
</template>
</gl-sprintf>
</p>
<p class="gl-m-0">
<gl-sprintf :message="reportChangeText">
<template #addNum>
<strong>{{ addNum }}</strong>
</template>
<template #changeNum>
<strong>{{ changeNum }}</strong>
</template>
<template #deleteNum>
<strong>{{ deleteNum }}</strong>
</template>
</gl-sprintf>
</p>
</div>
<div>
<gl-link
v-if="plan.job_path"
:href="plan.job_path"
target="_blank"
data-track-event="click_terraform_mr_plan_button"
data-track-label="mr_widget_terraform_mr_plan_button"
data-track-property="terraform_mr_plan_button"
class="btn btn-sm js-terraform-report-link"
rel="noopener"
>
{{ __('View full log') }}
<gl-icon name="external-link" />
</gl-link>
</div>
</div>
</div>
</template>
......@@ -36,7 +36,7 @@ import CheckingState from './components/states/mr_widget_checking.vue';
import eventHub from './event_hub';
import notify from '~/lib/utils/notify';
import SourceBranchRemovalStatus from './components/source_branch_removal_status.vue';
import TerraformPlan from './components/mr_widget_terraform_plan.vue';
import TerraformPlan from './components/terraform/mr_widget_terraform_container.vue';
import GroupedTestReportsApp from '../reports/components/grouped_test_reports_app.vue';
import { setFaviconOverlay } from '../lib/utils/common_utils';
import GroupedAccessibilityReportsApp from '../reports/accessibility_report/grouped_accessibility_reports_app.vue';
......
import { __ } from '~/locale';
import { generateToolbarItem } from './editor_service';
import buildCustomHTMLRenderer from './services/build_custom_renderer';
export const CUSTOM_EVENTS = {
openAddImageModal: 'gl_openAddImageModal',
......@@ -31,6 +32,7 @@ const TOOLBAR_ITEM_CONFIGS = [
export const EDITOR_OPTIONS = {
toolbarItems: TOOLBAR_ITEM_CONFIGS.map(config => generateToolbarItem(config)),
customHTMLRenderer: buildCustomHTMLRenderer(),
};
export const EDITOR_TYPES = {
......
import renderKramdownList from './renderers/render_kramdown_list';
import renderKramdownText from './renderers/render_kramdown_text';
const listRenderers = [renderKramdownList];
const textRenderers = [renderKramdownText];
// Runs the first renderer in `renderers` whose canRender() accepts the node;
// falls back to the editor's built-in output via context.origin().
const executeRenderer = (renderers, node, context) => {
  const matched = renderers.find(candidate => candidate.canRender(node, context));

  if (matched) {
    return matched.render(context);
  }

  return context.origin();
};
// Builds renderer functions for every custom type that does NOT already have
// a default handler; types present in `defaults` are handled there instead.
const buildCustomRendererFunctions = (customRenderers, defaults) => {
  const entries = Object.keys(customRenderers)
    .filter(type => !defaults[type])
    .map(type => [type, (node, context) => executeRenderer(customRenderers[type], node, context)]);

  return Object.fromEntries(entries);
};
/**
 * Assembles the customHTMLRenderer map for the ToastUI editor.
 *
 * `list` and `text` get dedicated handlers that run caller-supplied renderers
 * first, then the module's built-in kramdown renderers. Any other custom
 * type is wired up generically via buildCustomRendererFunctions.
 *
 * @param {Object} customRenderers renderer arrays keyed by node type
 * @returns {Object} renderer functions keyed by node type
 */
const buildCustomHTMLRenderer = (customRenderers = { list: [], text: [] }) => {
  const defaults = {
    list(node, context) {
      return executeRenderer([...customRenderers.list, ...listRenderers], node, context);
    },
    text(node, context) {
      return executeRenderer([...customRenderers.text, ...textRenderers], node, context);
    },
  };

  return {
    ...buildCustomRendererFunctions(customRenderers, defaults),
    ...defaults,
  };
};

export default buildCustomHTMLRenderer;
// Token factory: a ToastUI HTML token is { type, tagName, ...extra props }.
const buildToken = (type, tagName, props) => ({ type, tagName, ...props });

// Wraps `token` with an opening <div> that is non-editable and visually
// muted, so WYSIWYG users cannot modify the enclosed content.
export const buildUneditableOpenTokens = token => {
  const openTag = buildToken('openTag', 'div', {
    attributes: { contenteditable: false },
    classNames: [
      'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
    ],
  });

  return [openTag, token];
};

// Matching closing </div> for buildUneditableOpenTokens.
export const buildUneditableCloseToken = () => buildToken('closeTag', 'div');

// Fully wraps a token: open container, token, close container.
export const buildUneditableTokens = token => [
  ...buildUneditableOpenTokens(token),
  buildUneditableCloseToken(),
];
import { buildUneditableOpenTokens, buildUneditableCloseToken } from './build_uneditable_token';
// A kramdown table-of-contents marker is a leaf text node whose literal
// is exactly "TOC".
const isKramdownTOC = ({ type, literal }) => type === 'text' && literal === 'TOC';

// Walks down the first-child chain to the leaf; renders custom output only
// when that leaf is the kramdown TOC marker.
const canRender = node => {
  for (let current = node; current !== null; current = current.firstChild) {
    if (current.firstChild === null) {
      return isKramdownTOC(current);
    }
  }

  return false;
};

// Wrap the TOC list in an uneditable container (open on entering,
// close on leaving) so WYSIWYG users cannot mangle it.
const render = ({ entering, origin }) =>
  entering ? buildUneditableOpenTokens(origin()) : buildUneditableCloseToken();

export default { canRender, render };
import { buildUneditableTokens } from './build_uneditable_token';
// Detects kramdown attribute-list lines ({: ... }), which are not valid
// markdown and must be rendered read-only in WYSIWYG mode.
const canRender = ({ literal }) => {
  // `m` makes ^/$ match per line. Deliberately no `g` flag and no capture
  // group: RegExp#test with a global flag keeps lastIndex state between
  // calls and can silently skip matches, and the group result is unused.
  const kramdownRegex = /^{:.+}$/m;
  return kramdownRegex.test(literal);
};

// Wrap the original output in an uneditable container.
const render = ({ origin }) => buildUneditableTokens(origin());

export default { canRender, render };
......@@ -100,3 +100,11 @@
.gl-pl-7 {
padding-left: $gl-spacing-scale-7;
}
// Utility: transition only stroke-opacity (pairs with the
// gl-transition-duration/timing-function utilities for hover fades).
.gl-transition-property-stroke-opacity {
transition-property: stroke-opacity;
}
// Utility: transition only the stroke color.
.gl-transition-property-stroke {
transition-property: stroke;
}
......@@ -2,19 +2,34 @@
module KnownSignIn
include Gitlab::Utils::StrongMemoize
include CookiesHelper
KNOWN_SIGN_IN_COOKIE = :known_sign_in
KNOWN_SIGN_IN_COOKIE_EXPIRY = 14.days
private
def verify_known_sign_in
return unless current_user
notify_user unless known_remote_ip?
notify_user unless known_device? || known_remote_ip?
update_cookie
end
# True when the request's IP address matches one of the user's known IP
# addresses (see #known_ip_addresses).
def known_remote_ip?
known_ip_addresses.include?(request.remote_ip)
end
# True when the browser presents the encrypted known-sign-in cookie holding
# this user's id, i.e. this device signed in within the cookie's lifetime.
def known_device?
cookies.encrypted[KNOWN_SIGN_IN_COOKIE] == current_user.id
end
# (Re)sets the encrypted device cookie, sliding its expiry forward
# (KNOWN_SIGN_IN_COOKIE_EXPIRY) on each verified sign-in.
def update_cookie
set_secure_cookie(KNOWN_SIGN_IN_COOKIE, current_user.id,
type: COOKIE_TYPE_ENCRYPTED, httponly: true, expires: KNOWN_SIGN_IN_COOKIE_EXPIRY)
end
def sessions
strong_memoize(:session) do
ActiveSession.list(current_user).reject(&:is_impersonated)
......
......@@ -82,7 +82,7 @@ class Projects::ApplicationController < ApplicationController
end
def apply_diff_view_cookie!
set_secure_cookie(:diff_view, params.delete(:view), permanent: true) if params[:view].present?
set_secure_cookie(:diff_view, params.delete(:view), type: COOKIE_TYPE_PERMANENT) if params[:view].present?
end
def require_pages_enabled!
......
......@@ -8,6 +8,7 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
before_action :commit
before_action :define_diff_vars
before_action :define_diff_comment_vars, except: [:diffs_batch, :diffs_metadata]
before_action :update_diff_discussion_positions!
around_action :allow_gitaly_ref_name_caching
......@@ -171,4 +172,12 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
@notes.concat(draft_notes)
end
# Lazily backfills merge-ref-head positions for this MR's diff notes so
# discussions can be displayed on the merged-results diff. No-op once any
# DiffNotePosition rows exist, or when either feature flag is disabled.
def update_diff_discussion_positions!
return unless Feature.enabled?(:merge_ref_head_comments, @merge_request.target_project, default_enabled: true)
# NOTE(review): flag name reads "merge_red_head_..." — looks like a typo
# for "merge_ref_head_...". Renaming would change the flag's identity, so
# confirm which name is actually registered before correcting it.
return unless Feature.enabled?(:merge_red_head_comments_position_on_demand, @merge_request.target_project, default_enabled: true)
return if @merge_request.has_any_diff_note_positions?
Discussions::CaptureDiffNotePositionsService.new(@merge_request).execute
end
end
# frozen_string_literal: true
module CookiesHelper
def set_secure_cookie(key, value, httponly: false, permanent: false)
cookie_jar = permanent ? cookies.permanent : cookies
COOKIE_TYPE_PERMANENT = :permanent
COOKIE_TYPE_ENCRYPTED = :encrypted
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly }
def set_secure_cookie(key, value, httponly: false, expires: nil, type: nil)
cookie_jar = case type
when COOKIE_TYPE_PERMANENT
cookies.permanent
when COOKIE_TYPE_ENCRYPTED
cookies.encrypted
else
cookies
end
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly, expires: expires }
end
end
......@@ -67,6 +67,10 @@ module Noteable
false
end
# Whether any of this noteable's notes already has a DiffNotePosition row.
# The notes.any? guard short-circuits the position lookup when there are
# no notes at all.
def has_any_diff_note_positions?
notes.any? && DiffNotePosition.where(note: notes).exists?
end
def discussion_notes
notes
end
......
......@@ -97,29 +97,6 @@ class IssuableBaseService < BaseService
params.delete(label_key) if params[label_key].nil?
end
def filter_labels_in_param(key)
return if params[key].to_a.empty?
params[key] = available_labels.id_in(params[key]).pluck_primary_key
end
def find_or_create_label_ids
labels = params.delete(:labels)
return unless labels
params[:label_ids] = labels.map do |label_name|
label = Labels::FindOrCreateService.new(
current_user,
parent,
title: label_name.strip,
available_labels: available_labels
).execute
label.try(:id)
end.compact
end
def labels_service
@labels_service ||= ::Labels::AvailableLabelsService.new(current_user, parent, params)
end
......
---
title: Display Multiple Terraform Reports in MR Widget
merge_request: 34392
author:
type: added
---
title: "Prevents editing of non-markdown kramdown content in the Static Site Editor's WYSIWYG mode"
merge_request: 34185
author:
type: changed
---
title: Use IP or cookie in known sign-in check
merge_request: 34102
author:
type: changed
---
title: Fix missing templating vars set from URL in metrics dashboard
merge_request: 34668
author:
type: fixed
---
title: Update diff discussion positions on demand
merge_request: 34148
author:
type: added
......@@ -61,6 +61,7 @@ export default {
<gl-icon
name="issues"
:size="24"
class="class-name"
/>
</template>
```
......@@ -68,7 +69,7 @@ export default {
- **name** Name of the Icon in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)).
- **size (optional)** Number value for the size which is then mapped to a specific CSS class
(Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` CSS classes)
- **css-classes (optional)** Additional CSS Classes to add to the SVG tag.
- **class (optional)** Additional CSS Classes to add to the SVG tag.
### Usage in HTML/JS
......
......@@ -12,7 +12,7 @@ Some gems may not include their license information in their `gemspec` file, and
### License Finder commands
> Note: License Finder currently uses GitLab misused terms of whitelist and blacklist. As a result, the commands below references those terms. We've created an [issue on their project](https://github.com/pivotal/LicenseFinder/issues/745) to propose that they rename their commands.
> Note: License Finder currently uses GitLab misused terms of `whitelist` and `blacklist`. As a result, the commands below reference those terms. We've created an [issue on their project](https://github.com/pivotal/LicenseFinder/issues/745) to propose that they rename their commands.
There are a few basic commands License Finder provides that you'll need in order to manage license detection.
......
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -242,7 +242,7 @@ a `before_script` execution to prepare your scan job.
To pass your project's dependencies as artifacts, the dependencies must be included
in the project's working directory and specified using the `artifacts:path` configuration.
If all dependencies are present, the `-compile=false` flag can be provided to the
If all dependencies are present, the `COMPILE=false` variable can be provided to the
analyzer and compilation will be skipped:
```yaml
......@@ -267,10 +267,9 @@ build:
spotbugs-sast:
dependencies:
- build
script:
- /analyzer run -compile=false
variables:
MAVEN_REPO_PATH: ./.m2/repository
COMPILE: false
artifacts:
reports:
sast: gl-sast-report.json
......@@ -339,6 +338,7 @@ Some analyzers can be customized with environment variables.
| `SCAN_KUBERNETES_MANIFESTS` | Kubesec | Set to `"true"` to scan Kubernetes manifests. |
| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` will use to generate a Kubernetes manifest that `kubesec` will scan. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. |
| `KUBESEC_HELM_OPTIONS` | Kubesec | Additional arguments for the `helm` executable. |
| `COMPILE` | SpotBugs | Set to `"false"` to disable project compilation and dependency fetching. |
| `ANT_HOME` | SpotBugs | The `ANT_HOME` environment variable. |
| `ANT_PATH` | SpotBugs | Path to the `ant` executable. |
| `GRADLE_PATH` | SpotBugs | Path to the `gradle` executable. |
......
......@@ -821,6 +821,16 @@ user's home location (in this case the user is `root` since it runs in a
Docker container), and Maven will use the configured CI
[environment variables](../../../ci/variables/README.md#predefined-environment-variables).
### Version validation
The version string is validated using the following regex.
```ruby
\A(\.?[\w\+-]+\.?)+\z
```
You can play around with the regex and try your version strings on [this regular expression editor](https://rubular.com/r/rrLQqUXjfKEoL6).
## Troubleshooting
### Useful Maven command line options
......
......@@ -22,7 +22,7 @@ See the [authentication topic](../../topics/authentication/index.md) for more de
### Unknown sign-in
GitLab will notify you if a sign-in occurs that is from an unknown IP address.
GitLab will notify you if a sign-in occurs that is from an unknown IP address or device.
See [Unknown Sign-In Notification](unknown_sign_in_notification.md) for more details.
## User profile
......
......@@ -9,16 +9,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/27211) in GitLab 13.0.
When a user successfully signs in from a previously unknown IP address,
When a user successfully signs in from a previously unknown IP address or device,
GitLab notifies the user by email. In this way, GitLab proactively alerts users of potentially
malicious or unauthorized sign-ins.
There are two methods used to identify a known sign-in:
There are several methods used to identify a known sign-in. All methods must fail
for a notification email to be sent.
- Last sign-in IP: The current sign-in IP address is checked against the last sign-in
IP address.
- Current active sessions: If the user has an existing active session from the
same IP address. See [Active Sessions](active_sessions.md).
- Cookie: After successful sign in, an encrypted cookie is stored in the browser.
This cookie is set to expire 14 days after the last successful sign in.
## Example email
......
......@@ -173,6 +173,24 @@ Read through the documentation on [project settings](settings/index.md).
- [Export a project from GitLab](settings/import_export.md#exporting-a-project-and-its-data)
- [Importing and exporting projects between GitLab instances](settings/import_export.md)
## Remove a project
To remove a project, first navigate to the home page for that project.
1. Navigate to **Settings > General**.
1. Expand the **Advanced** section.
1. Scroll down to the **Remove project** section.
1. Click **Remove project**.
1. Confirm this action by typing in the expected text.
### Delayed removal **(PREMIUM)**
By default, clicking to remove a project is followed by a seven-day delay. Admins can restore the project during this period of time.
This delay [may be changed by an admin](../admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
Admins can view all projects pending deletion. If you're an administrator, go to the top navigation bar, click **Projects > Your projects**, and then select the **Removed projects** tab.
From this tab an admin can restore any project.
## CI/CD for external repositories **(PREMIUM)**
Instead of importing a repository directly to GitLab, you can connect your repository
......
<script>
import { mapActions, mapState } from 'vuex';
import AuditEventsFilter from './audit_events_filter.vue';
import DateRangeField from './date_range_field.vue';
import SortingField from './sorting_field.vue';
......@@ -12,10 +13,6 @@ export default {
AuditEventsTable,
},
props: {
formPath: {
type: String,
required: true,
},
events: {
type: Array,
required: false,
......@@ -41,16 +38,11 @@ export default {
default: undefined,
},
},
data() {
return {
formElement: null,
};
computed: {
...mapState(['filterValue', 'startDate', 'endDate', 'sortBy']),
},
mounted() {
// Passing the form to child components is only temporary
// and should be changed when this issue is completed:
// https://gitlab.com/gitlab-org/gitlab/-/issues/217759
this.formElement = this.$refs.form;
methods: {
...mapActions(['setDateRange', 'setFilterValue', 'setSortBy', 'searchForAuditEvents']),
},
};
</script>
......@@ -58,25 +50,34 @@ export default {
<template>
<div>
<div class="row-content-block second-block pb-0">
<form
ref="form"
method="GET"
:path="formPath"
class="filter-form d-flex justify-content-between audit-controls row"
>
<div class="d-flex justify-content-between audit-controls row">
<div class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8">
<audit-events-filter v-bind="{ enabledTokenTypes, qaSelector: filterQaSelector }" />
<audit-events-filter
:enabled-token-types="enabledTokenTypes"
:qa-selector="filterQaSelector"
:value="filterValue"
@selected="setFilterValue"
@submit="searchForAuditEvents"
/>
</div>
<div class="d-flex col-lg-auto flex-wrap pl-lg-0">
<div
class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0"
>
<date-range-field v-if="formElement" :form-element="formElement" />
<sorting-field />
<date-range-field
:start-date="startDate"
:end-date="endDate"
@selected="setDateRange"
/>
<sorting-field :sort-by="sortBy" @selected="setSortBy" />
</div>
</div>
</form>
</div>
</div>
<audit-events-table v-bind="{ events, isLastPage, qaSelector: tableQaSelector }" />
<audit-events-table
:events="events"
:is-last-page="isLastPage"
:qa-selector="tableQaSelector"
/>
</div>
</template>
<script>
import { GlFilteredSearch } from '@gitlab/ui';
import { queryToObject } from '~/lib/utils/url_utility';
import { FILTER_TOKENS, AVAILABLE_TOKEN_TYPES } from '../constants';
import { availableTokensValidator } from '../validators';
......@@ -9,6 +8,11 @@ export default {
GlFilteredSearch,
},
props: {
value: {
type: Array,
required: false,
default: () => [],
},
enabledTokenTypes: {
type: Array,
required: false,
......@@ -21,14 +25,9 @@ export default {
default: undefined,
},
},
data() {
return {
searchTerms: [],
};
},
computed: {
searchTerm() {
return this.searchTerms.find(term => AVAILABLE_TOKEN_TYPES.includes(term.type));
return this.value.find(term => AVAILABLE_TOKEN_TYPES.includes(term.type));
},
enabledTokens() {
return FILTER_TOKENS.filter(token => this.enabledTokenTypes.includes(token.type));
......@@ -36,39 +35,23 @@ export default {
filterTokens() {
// This limits the user to search by only one of the available tokens
const { enabledTokens, searchTerm } = this;
if (searchTerm?.type) {
return enabledTokens.map(token => ({
...token,
disabled: searchTerm.type !== token.type,
}));
}
return enabledTokens;
},
id() {
return this.searchTerm?.value?.data;
},
type() {
return this.searchTerm?.type;
},
},
created() {
this.setSearchTermsFromQuery();
},
methods: {
// The form logic here will be removed once all the audit
// components are migrated into a single Vue application.
// https://gitlab.com/gitlab-org/gitlab/-/issues/215363
getFormElement() {
return this.$refs.input.form;
onSubmit() {
this.$emit('submit');
},
setSearchTermsFromQuery() {
const { entity_type: type, entity_id: value } = queryToObject(window.location.search);
if (type && value) {
this.searchTerms = [{ type, value: { data: value, operator: '=' } }];
}
},
filteredSearchSubmit() {
this.getFormElement().submit();
onInput(val) {
this.$emit('selected', val);
},
},
};
......@@ -81,16 +64,14 @@ export default {
:data-qa-selector="qaSelector"
>
<gl-filtered-search
v-model="searchTerms"
:value="value"
:placeholder="__('Search')"
:clear-button-title="__('Clear')"
:close-button-title="__('Close')"
:available-tokens="filterTokens"
class="gl-h-32 w-100"
@submit="filteredSearchSubmit"
@submit="onSubmit"
@input="onInput"
/>
<input ref="input" v-model="type" type="hidden" name="entity_type" />
<input v-model="id" type="hidden" name="entity_id" />
</div>
</template>
<script>
import { GlDaterangePicker } from '@gitlab/ui';
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { queryToObject } from '~/lib/utils/url_utility';
export default {
components: {
GlDaterangePicker,
},
props: {
formElement: {
type: HTMLFormElement,
required: true,
startDate: {
type: Date,
required: false,
default: null,
},
},
data() {
const data = {
startDate: null,
endDate: null,
};
const { created_after: initialStartDate, created_before: initialEndDate } = queryToObject(
window.location.search,
);
if (initialStartDate) {
data.startDate = parsePikadayDate(initialStartDate);
}
if (initialEndDate) {
data.endDate = parsePikadayDate(initialEndDate);
}
return data;
},
computed: {
createdAfter() {
return this.startDate ? pikadayToString(this.startDate) : '';
},
createdBefore() {
return this.endDate ? pikadayToString(this.endDate) : '';
endDate: {
type: Date,
required: false,
default: null,
},
},
methods: {
handleInput(dates) {
this.startDate = dates.startDate;
this.endDate = dates.endDate;
this.$nextTick(() => this.formElement.submit());
onInput(dates) {
this.$emit('selected', dates);
},
},
};
</script>
<template>
<div>
<gl-daterange-picker
class="d-flex flex-wrap flex-sm-nowrap"
:default-start-date="startDate"
:default-end-date="endDate"
start-picker-class="form-group align-items-lg-center mr-0 mr-sm-1 d-flex flex-column flex-lg-row"
end-picker-class="form-group align-items-lg-center mr-0 mr-sm-2 d-flex flex-column flex-lg-row"
@input="handleInput"
/>
<input type="hidden" name="created_after" :value="createdAfter" />
<input type="hidden" name="created_before" :value="createdBefore" />
</div>
<gl-daterange-picker
class="d-flex flex-wrap flex-sm-nowrap"
:default-start-date="startDate"
:default-end-date="endDate"
start-picker-class="form-group align-items-lg-center mr-0 mr-sm-1 d-flex flex-column flex-lg-row"
end-picker-class="form-group align-items-lg-center mr-0 mr-sm-2 d-flex flex-column flex-lg-row"
@input="onInput"
/>
</template>
<script>
import { GlNewDropdown, GlNewDropdownHeader, GlNewDropdownItem } from '@gitlab/ui';
import { setUrlParams, queryToObject } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
const SORTING_TITLE = s__('SortOptions|Sort by:');
......@@ -22,24 +20,24 @@ export default {
GlNewDropdownHeader,
GlNewDropdownItem,
},
data() {
const { sort: selectedOption } = queryToObject(window.location.search);
return {
selectedOption: selectedOption || SORTING_OPTIONS[0].key,
};
props: {
sortBy: {
type: String,
required: false,
default: null,
},
},
computed: {
selectedOptionText() {
return SORTING_OPTIONS.find(option => option.key === this.selectedOption).text;
selectedOption() {
return SORTING_OPTIONS.find(option => option.key === this.sortBy) || SORTING_OPTIONS[0];
},
},
methods: {
getItemLink(key) {
return setUrlParams({ sort: key });
onItemClick(option) {
this.$emit('selected', option);
},
isChecked(key) {
return key === this.selectedOption;
return key === this.selectedOption.key;
},
},
SORTING_TITLE,
......@@ -49,23 +47,17 @@ export default {
<template>
<div>
<gl-new-dropdown
v-model="selectedOption"
:text="selectedOptionText"
class="w-100 flex-column flex-lg-row form-group"
>
<gl-new-dropdown :text="selectedOption.text" class="w-100 flex-column flex-lg-row form-group">
<gl-new-dropdown-header> {{ $options.SORTING_TITLE }}</gl-new-dropdown-header>
<gl-new-dropdown-item
v-for="option in $options.SORTING_OPTIONS"
:key="option.key"
:is-check-item="true"
:is-checked="isChecked(option.key)"
:href="getItemLink(option.key)"
@click="onItemClick(option.key)"
>
{{ option.text }}
</gl-new-dropdown-item>
</gl-new-dropdown>
<input type="hidden" name="sort" :value="selectedOption" />
</div>
</template>
import Vue from 'vue';
import { parseBoolean } from '~/lib/utils/common_utils';
import AuditEventsApp from './components/audit_events_app.vue';
import createStore from './store';
export default selector => {
const el = document.querySelector(selector);
const {
events,
isLastPage,
formPath,
enabledTokenTypes,
filterQaSelector,
tableQaSelector,
} = el.dataset;
const { events, isLastPage, enabledTokenTypes, filterQaSelector, tableQaSelector } = el.dataset;
const store = createStore();
store.dispatch('initializeAuditEvents');
return new Vue({
el,
store,
render: createElement =>
createElement(AuditEventsApp, {
props: {
events: JSON.parse(events),
isLastPage: parseBoolean(isLastPage),
enabledTokenTypes: JSON.parse(enabledTokenTypes),
formPath,
filterQaSelector,
tableQaSelector,
},
......
......@@ -18,9 +18,8 @@ export const setDateRange = ({ commit, dispatch }, { startDate, endDate }) => {
dispatch('searchForAuditEvents');
};
export const setFilterValue = ({ commit, dispatch }, { id, type }) => {
commit(types.SET_FILTER_VALUE, { id, type });
dispatch('searchForAuditEvents');
export const setFilterValue = ({ commit }, filterValue) => {
commit(types.SET_FILTER_VALUE, filterValue);
};
export const setSortBy = ({ commit, dispatch }, sortBy) => {
......
......@@ -11,14 +11,14 @@ export default {
sort: sortBy = null,
} = {},
) {
state.filterValue = { id, type };
state.filterValue = type && id ? [{ type, value: { data: id, operator: '=' } }] : [];
state.startDate = startDate;
state.endDate = endDate;
state.sortBy = sortBy;
},
[types.SET_FILTER_VALUE](state, { id, type }) {
state.filterValue = { id, type };
[types.SET_FILTER_VALUE](state, filterValue) {
state.filterValue = filterValue;
},
[types.SET_DATE_RANGE](state, { startDate, endDate }) {
......
export default () => ({
filterValue: {
id: null,
type: null,
},
filterValue: [],
startDate: null,
endDate: null,
......
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { AVAILABLE_TOKEN_TYPES } from './constants';
export const isNumeric = str => {
return !Number.isNaN(parseInt(str, 10), 10);
......@@ -14,10 +15,16 @@ export const parseAuditEventSearchQuery = ({
created_before: createdBefore ? parsePikadayDate(createdBefore) : null,
});
export const createAuditEventSearchQuery = ({ filterValue, startDate, endDate, sortBy }) => ({
entity_id: filterValue.id,
entity_type: filterValue.type,
created_after: startDate ? pikadayToString(startDate) : null,
created_before: endDate ? pikadayToString(endDate) : null,
sort: sortBy,
});
export const createAuditEventSearchQuery = ({ filterValue, startDate, endDate, sortBy }) => {
const entityValue = filterValue.find(value => AVAILABLE_TOKEN_TYPES.includes(value.type));
return {
created_after: startDate ? pikadayToString(startDate) : null,
created_before: endDate ? pikadayToString(endDate) : null,
sort: sortBy,
entity_id: entityValue?.value.data,
entity_type: entityValue?.type,
// When changing the search parameters, we should be resetting to the first page
page: null,
};
};
......@@ -23,7 +23,7 @@ export default {
<section
v-if="hasStickySlot"
data-testid="sticky-section"
class="position-sticky gl-z-index-2 security_dashboard_filters"
class="position-sticky gl-z-index-2 security-dashboard-filters"
>
<slot name="sticky"></slot>
</section>
......
......@@ -70,10 +70,13 @@ export default {
return this.shouldShowSelection && Boolean(this.numOfSelectedVulnerabilities);
},
checkboxClass() {
return this.shouldShowSelection ? '' : 'd-none';
return this.shouldShowSelection ? '' : 'gl-display-none';
},
theadClass() {
return this.shouldShowSelectionSummary ? 'below-selection-summary' : '';
},
fields() {
const commonThClass = ['table-th-transparent', 'original-gl-th'].join(' ');
const commonThClass = ['table-th-transparent', 'original-gl-th', 'gl-bg-white!'].join(' ');
return [
{
key: 'checkbox',
......@@ -154,7 +157,7 @@ export default {
</script>
<template>
<div>
<div class="vulnerability-list">
<selection-summary
v-if="shouldShowSelectionSummary"
:selected-vulnerabilities="Object.values(selectedVulnerabilities)"
......@@ -165,6 +168,7 @@ export default {
:busy="isLoading"
:fields="fields"
:items="vulnerabilities"
:thead-class="theadClass"
stacked="sm"
show-empty
responsive
......
$security-filter-height: 90px;
$selection-summary-height: 68px;
@mixin sticky-top-positioning($extra: 0) {
top: $header-height + $extra;
.with-performance-bar & {
top: $header-height + $performance-bar-height + $extra;
}
}
.vulnerabilities-row {
&.dismissed .table-mobile-content:not(.action-buttons) {
opacity: 0.5;
......@@ -23,10 +34,26 @@
}
}
.security_dashboard_filters {
top: $header-height;
.security-dashboard-filters {
@include sticky-top-positioning();
}
.with-performance-bar & {
top: $header-height + $performance-bar-height;
// Due to position: sticky not being supported on Chrome (https://caniuse.com/#feat=css-sticky),
// the property is assigned to the th element as a workaround
.vulnerability-list {
.card,
thead th {
position: -webkit-sticky;
position: sticky;
z-index: 1;
@include sticky-top-positioning($security-filter-height);
}
thead th {
box-shadow: 0 1px $gray-100;
}
thead.below-selection-summary th {
@include sticky-top-positioning($security-filter-height + $selection-summary-height);
}
}
# frozen_string_literal: true
module Geo
class ProjectRegistryFinder
# Returns ProjectRegistry records that have never been synced.
#
# Does not care about selective sync, because it considers the Registry
# table to be the single source of truth. The contract is that other
# processes need to ensure that the table only contains records that should
# be synced.
#
# Any registries that have ever been synced that currently need to be
# resynced will be handled by other find methods (like
# #find_retryable_dirty_registries)
#
# You can pass a list with `except_ids:` so you can exclude items you
# already scheduled but haven't finished and aren't persisted to the database yet
#
# @param [Integer] batch_size used to limit the results returned
# @param [Array<Integer>] except_ids ids that will be ignored from the query
# rubocop:disable CodeReuse/ActiveRecord
def find_never_synced_registries(batch_size:, except_ids: [])
Geo::ProjectRegistry
.never_synced
.model_id_not_in(except_ids)
.limit(batch_size)
end
# rubocop:enable CodeReuse/ActiveRecord
# rubocop:disable CodeReuse/ActiveRecord
def find_retryable_dirty_registries(batch_size:, except_ids: [])
Geo::ProjectRegistry
.dirty
.retry_due
.model_id_not_in(except_ids)
.order(Gitlab::Database.nulls_first_order(:last_repository_synced_at))
.limit(batch_size)
end
# rubocop:enable CodeReuse/ActiveRecord
end
end
......@@ -11,6 +11,10 @@ class Geo::BaseRegistry < Geo::TrackingBase
where(self::MODEL_FOREIGN_KEY => range).pluck(self::MODEL_FOREIGN_KEY)
end
def self.pluck_model_foreign_key
where(nil).pluck(self::MODEL_FOREIGN_KEY)
end
def self.model_id_in(ids)
where(self::MODEL_FOREIGN_KEY => ids)
end
......
# frozen_string_literal: true
class Geo::DeletedProject
attr_reader :id, :name, :disk_path
include ActiveModel::Validations
attr_accessor :id, :name, :disk_path
validates :id, :name, :disk_path, presence: true
def initialize(id:, name:, disk_path:, repository_storage:)
@id = id
......
......@@ -3,6 +3,9 @@
class Geo::DesignRegistry < Geo::BaseRegistry
include ::Delay
MODEL_CLASS = ::Project
MODEL_FOREIGN_KEY = :project_id
RETRIES_BEFORE_REDOWNLOAD = 5
belongs_to :project
......
......@@ -5,6 +5,9 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
include ::EachBatch
include ::ShaAttribute
MODEL_CLASS = ::Project
MODEL_FOREIGN_KEY = :project_id
REGISTRY_TYPES = %i{repository wiki}.freeze
RETRIES_BEFORE_REDOWNLOAD = 5
......@@ -39,6 +42,34 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
where(nil).pluck(:project_id)
end
def self.registry_consistency_worker_enabled?
Feature.enabled?(:geo_project_registry_ssot_sync)
end
def self.has_create_events?
true
end
def self.find_registry_differences(range)
source_ids = Gitlab::Geo.current_node.projects.id_in(range).pluck_primary_key
tracked_ids = self.pluck_model_ids_in_range(range)
untracked_ids = source_ids - tracked_ids
unused_tracked_ids = tracked_ids - source_ids
[untracked_ids, unused_tracked_ids]
end
def self.delete_worker_class
::GeoRepositoryDestroyWorker
end
def self.delete_for_model_ids(project_ids)
project_ids.map do |project_id|
delete_worker_class.perform_async(project_id)
end
end
def self.failed
repository_sync_failed = arel_table[:repository_retry_count].gt(0)
wiki_sync_failed = arel_table[:wiki_retry_count].gt(0)
......
......@@ -36,6 +36,7 @@ class Packages::Package < ApplicationRecord
validates :version, format: { with: Gitlab::Regex.semver_regex }, if: -> { npm? || nuget? }
validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6 }
......
......@@ -3,10 +3,14 @@
module Geo
class RepositoryDestroyService
include ::Gitlab::Geo::LogHelpers
include ::Gitlab::Utils::StrongMemoize
attr_reader :id, :name, :disk_path, :repository_storage
def initialize(id, name, disk_path, repository_storage)
# There is a possibility that the replicable's record does not exist
# anymore. In this case, you need to pass the optional parameters
# explicitly.
def initialize(id, name = nil, disk_path = nil, repository_storage = nil)
@id = id
@name = name
@disk_path = disk_path
......@@ -29,25 +33,36 @@ module Geo
private
def destroy_project
::Projects::DestroyService.new(deleted_project, nil).geo_replicate
# We should skip if we had to rebuild the project, but we don't
# have the information that our service class requires.
return if project.is_a?(Geo::DeletedProject) && !project.valid?
::Projects::DestroyService.new(project, nil).geo_replicate
end
# rubocop: disable CodeReuse/ActiveRecord
def destroy_registry_entries
::Geo::ProjectRegistry.where(project_id: id).delete_all
::Geo::DesignRegistry.where(project_id: id).delete_all
::Geo::ProjectRegistry.model_id_in(id).delete_all
::Geo::DesignRegistry.model_id_in(id).delete_all
log_info("Registry entries removed", project_id: id)
log_info('Registry entries removed', project_id: id)
end
# rubocop: enable CodeReuse/ActiveRecord
def deleted_project
# We don't have access to the original model anymore, so we are
# rebuilding only what our service class requires
::Geo::DeletedProject.new(id: id,
name: name,
disk_path: disk_path,
repository_storage: repository_storage)
def project
strong_memoize(:project) do
Project.find(id)
rescue ActiveRecord::RecordNotFound => e
# When cleaning up project/registries, there are some cases where
# the replicable record does not exist anymore. So, we try to
# rebuild it with only what our service class requires.
log_error('Could not find project', e.message)
::Geo::DeletedProject.new(
id: id,
name: name,
disk_path: disk_path,
repository_storage: repository_storage
)
end
end
end
end
......@@ -10,15 +10,15 @@ module Geo
{ project_id: project_id, job_id: job_id } if job_id
end
def find_project_ids_not_synced(batch_size:)
def find_project_ids_not_synced(except_ids:, batch_size:)
Geo::DesignUnsyncedFinder
.new(scheduled_project_ids: scheduled_project_ids, shard_name: shard_name, batch_size: batch_size)
.new(scheduled_project_ids: except_ids, shard_name: shard_name, batch_size: batch_size)
.execute
end
def find_project_ids_updated_recently(batch_size:)
def find_project_ids_updated_recently(except_ids:, batch_size:)
Geo::DesignUpdatedRecentlyFinder
.new(scheduled_project_ids: scheduled_project_ids, shard_name: shard_name, batch_size: batch_size)
.new(scheduled_project_ids: except_ids, shard_name: shard_name, batch_size: batch_size)
.execute
end
end
......
......@@ -62,22 +62,31 @@ module Geo
end
def load_pending_resources
resources = find_project_ids_not_synced(batch_size: db_retrieve_batch_size)
return [] unless valid_shard?
resources = find_project_ids_not_synced(except_ids: scheduled_project_ids, batch_size: db_retrieve_batch_size)
remaining_capacity = db_retrieve_batch_size - resources.size
if remaining_capacity.zero?
resources
else
resources + find_project_ids_updated_recently(batch_size: remaining_capacity)
resources + find_project_ids_updated_recently(except_ids: scheduled_project_ids + resources, batch_size: remaining_capacity)
end
end
# rubocop: disable CodeReuse/ActiveRecord
def find_project_ids_not_synced(batch_size:)
find_unsynced_projects(batch_size: batch_size)
.id_not_in(scheduled_project_ids)
.reorder(last_repository_updated_at: :desc)
.pluck_primary_key
def find_project_ids_not_synced(except_ids:, batch_size:)
if Geo::ProjectRegistry.registry_consistency_worker_enabled?
project_ids =
find_never_synced_project_ids(batch_size: batch_size, except_ids: except_ids)
find_project_ids_within_shard(project_ids, direction: :desc)
else
find_unsynced_projects(batch_size: batch_size)
.id_not_in(except_ids)
.reorder(last_repository_updated_at: :desc)
.pluck_primary_key
end
end
# rubocop: enable CodeReuse/ActiveRecord
......@@ -88,11 +97,18 @@ module Geo
end
# rubocop: disable CodeReuse/ActiveRecord
def find_project_ids_updated_recently(batch_size:)
find_projects_updated_recently(batch_size: batch_size)
.id_not_in(scheduled_project_ids)
.order('project_registry.last_repository_synced_at ASC NULLS FIRST, projects.last_repository_updated_at ASC')
.pluck_primary_key
def find_project_ids_updated_recently(except_ids:, batch_size:)
if Geo::ProjectRegistry.registry_consistency_worker_enabled?
project_ids =
find_retryable_dirty_project_ids(batch_size: batch_size, except_ids: except_ids)
find_project_ids_within_shard(project_ids, direction: :asc)
else
find_projects_updated_recently(batch_size: batch_size)
.id_not_in(except_ids)
.order('project_registry.last_repository_synced_at ASC NULLS FIRST, projects.last_repository_updated_at ASC')
.pluck_primary_key
end
end
# rubocop: enable CodeReuse/ActiveRecord
......@@ -101,5 +117,37 @@ module Geo
.new(current_node: current_node, shard_name: shard_name, batch_size: batch_size)
.execute
end
def valid_shard?
return true unless current_node.selective_sync_by_shards?
current_node.selective_sync_shards.include?(shard_name)
end
def find_never_synced_project_ids(batch_size:, except_ids:)
registry_finder
.find_never_synced_registries(batch_size: batch_size, except_ids: except_ids)
.pluck_model_foreign_key
end
def find_retryable_dirty_project_ids(batch_size:, except_ids:)
registry_finder
.find_retryable_dirty_registries(batch_size: batch_size, except_ids: except_ids)
.pluck_model_foreign_key
end
# rubocop:disable CodeReuse/ActiveRecord
def find_project_ids_within_shard(project_ids, direction:)
Project
.id_in(project_ids)
.within_shards(shard_name)
.reorder(last_repository_updated_at: direction)
.pluck_primary_key
end
# rubocop:enable CodeReuse/ActiveRecord
def registry_finder
@registry_finder ||= Geo::ProjectRegistryFinder.new
end
end
end
......@@ -18,8 +18,9 @@ module Geo
REGISTRY_CLASSES = [
Geo::JobArtifactRegistry,
Geo::LfsObjectRegistry,
Geo::UploadRegistry,
Geo::PackageFileRegistry
Geo::PackageFileRegistry,
Geo::ProjectRegistry,
Geo::UploadRegistry
].freeze
BATCH_SIZE = 1000
......
......@@ -3,10 +3,13 @@
class GeoRepositoryDestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include GeoQueue
include ::Gitlab::Geo::LogHelpers
loggable_arguments 1, 2, 3
def perform(id, name, disk_path, storage_name)
def perform(id, name = nil, disk_path = nil, storage_name = nil)
log_info('Executing Geo::RepositoryDestroyService', id: id, name: name, disk_path: disk_path, storage_name: storage_name)
Geo::RepositoryDestroyService.new(id, name, disk_path, storage_name).execute
end
end
---
title: Pin selection summary/list header to the page top
merge_request: 33875
author:
type: added
---
title: Add validation to maven package version
merge_request: 32925
author: Bola Ahmed Buari
type: added
......@@ -162,9 +162,10 @@ RSpec.describe 'Admin::AuditLogs', :js do
end
def filter_for(type, name)
within '[data-qa-selector="admin_audit_log_filter"]' do
find('input').click
filter_container = '[data-testid="audit-events-filter"]'
find(filter_container).click
within filter_container do
click_link type
click_link name
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::ProjectRegistryFinder, :geo do
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
let_it_be(:project_4) { create(:project) }
let_it_be(:project_5) { create(:project) }
let_it_be(:project_6) { create(:project) }
let_it_be(:registry_project_1) { create(:geo_project_registry, :synced, project_id: project_1.id) }
let_it_be(:registry_project_2) { create(:geo_project_registry, :sync_failed, project_id: project_2.id) }
let_it_be(:registry_project_3) { create(:geo_project_registry, project_id: project_3.id) }
let_it_be(:registry_project_4) { create(:geo_project_registry, :repository_dirty, project_id: project_4.id, last_repository_synced_at: 2.days.ago) }
let_it_be(:registry_project_5) { create(:geo_project_registry, :wiki_dirty, project_id: project_5.id, last_repository_synced_at: 5.days.ago) }
let_it_be(:registry_project_6) { create(:geo_project_registry, project_id: project_6.id) }
describe '#find_never_synced_registries' do
it 'returns registries for projects that have never been synced' do
registries = subject.find_never_synced_registries(batch_size: 10)
expect(registries).to match_ids(registry_project_3, registry_project_6)
end
it 'excludes except_ids' do
registries = subject.find_never_synced_registries(batch_size: 10, except_ids: [project_3.id])
expect(registries).to match_ids(registry_project_6)
end
end
describe '#find_retryable_dirty_registries' do
it 'returns registries for projects that have been recently updated or that have never been synced' do
registries = subject.find_retryable_dirty_registries(batch_size: 10)
expect(registries).to match_ids(registry_project_2, registry_project_3, registry_project_4, registry_project_5, registry_project_6)
end
it 'excludes except_ids' do
registries = subject.find_retryable_dirty_registries(batch_size: 10, except_ids: [project_4.id, project_5.id, project_6.id])
expect(registries).to match_ids(registry_project_2, registry_project_3)
end
end
end
......@@ -5,10 +5,8 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
<div
class="row-content-block second-block pb-0"
>
<form
class="filter-form d-flex justify-content-between audit-controls row"
method="GET"
path="form/path"
<div
class="d-flex justify-content-between audit-controls row"
>
<div
class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8"
......@@ -24,17 +22,7 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
clearbuttontitle="Clear"
close-button-title="Close"
placeholder="Search"
value=""
/>
<input
name="entity_type"
type="hidden"
/>
<input
name="entity_id"
type="hidden"
value="[object Object]"
/>
</div>
</div>
......@@ -46,13 +34,16 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0"
>
<date-range-field-stub
formelement="[object HTMLFormElement]"
enddate="Sun Feb 02 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
startdate="Wed Jan 01 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
/>
<sorting-field-stub />
<sorting-field-stub
sortby="created_asc"
/>
</div>
</div>
</form>
</div>
</div>
<audit-events-table-stub
......
......@@ -2,12 +2,20 @@ import { shallowMount } from '@vue/test-utils';
import AuditEventsApp from 'ee/audit_events/components/audit_events_app.vue';
import DateRangeField from 'ee/audit_events/components/date_range_field.vue';
import SortingField from 'ee/audit_events/components/sorting_field.vue';
import AuditEventsTable from 'ee/audit_events/components/audit_events_table.vue';
import AuditEventsFilter from 'ee/audit_events/components/audit_events_filter.vue';
import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants';
import createStore from 'ee/audit_events/store';
const TEST_SORT_BY = 'created_asc';
const TEST_START_DATE = new Date('2020-01-01');
const TEST_END_DATE = new Date('2020-02-02');
const TEST_FILTER_VALUE = [{ id: 50, type: 'User' }];
describe('AuditEventsApp', () => {
let wrapper;
let store;
const events = [{ foo: 'bar' }];
const enabledTokenTypes = AVAILABLE_TOKEN_TYPES;
......@@ -16,8 +24,8 @@ describe('AuditEventsApp', () => {
const initComponent = (props = {}) => {
wrapper = shallowMount(AuditEventsApp, {
store,
propsData: {
formPath: 'form/path',
isLastPage: true,
filterQaSelector,
tableQaSelector,
......@@ -31,9 +39,20 @@ describe('AuditEventsApp', () => {
});
};
beforeEach(() => {
store = createStore();
Object.assign(store.state, {
startDate: TEST_START_DATE,
endDate: TEST_END_DATE,
sortBy: TEST_SORT_BY,
filterValue: TEST_FILTER_VALUE,
});
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
store = null;
});
describe('when initialized', () => {
......@@ -45,25 +64,51 @@ describe('AuditEventsApp', () => {
expect(wrapper.element).toMatchSnapshot();
});
it('sets the form element on the date range field', () => {
const { element } = wrapper.find('form');
expect(wrapper.find(DateRangeField).props('formElement')).toEqual(element);
it('renders audit events table', () => {
expect(wrapper.find(AuditEventsTable).props()).toEqual({
events,
qaSelector: tableQaSelector,
isLastPage: true,
});
});
it('renders audit events filter', () => {
expect(wrapper.find(AuditEventsFilter).props()).toEqual({
enabledTokenTypes,
qaSelector: filterQaSelector,
value: TEST_FILTER_VALUE,
});
});
it('passes its events property to the logs table', () => {
expect(wrapper.find(AuditEventsTable).props('events')).toEqual(events);
it('renders date range field', () => {
expect(wrapper.find(DateRangeField).props()).toEqual({
startDate: TEST_START_DATE,
endDate: TEST_END_DATE,
});
});
it('passes the tables QA selector to the logs table', () => {
expect(wrapper.find(AuditEventsTable).props('qaSelector')).toEqual(tableQaSelector);
it('renders sorting field', () => {
expect(wrapper.find(SortingField).props()).toEqual({ sortBy: TEST_SORT_BY });
});
});
it('passes its available token types to the logs filter', () => {
expect(wrapper.find(AuditEventsFilter).props('enabledTokenTypes')).toEqual(enabledTokenTypes);
describe('when a field is selected', () => {
beforeEach(() => {
jest.spyOn(store, 'dispatch').mockImplementation();
initComponent();
});
it('passes the filters QA selector to the logs filter', () => {
expect(wrapper.find(AuditEventsFilter).props('qaSelector')).toEqual(filterQaSelector);
it.each`
name | field | action | payload
${'date range'} | ${DateRangeField} | ${'setDateRange'} | ${'test'}
${'sort by'} | ${SortingField} | ${'setSortBy'} | ${'test'}
${'events filter'} | ${AuditEventsFilter} | ${'setFilterValue'} | ${'test'}
`('for $name, it calls $handler', ({ field, action, payload }) => {
expect(store.dispatch).not.toHaveBeenCalled();
wrapper.find(field).vm.$emit('selected', payload);
expect(store.dispatch).toHaveBeenCalledWith(action, payload);
});
});
});
......@@ -6,9 +6,8 @@ import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants';
describe('AuditEventsFilter', () => {
let wrapper;
const formElement = document.createElement('form');
formElement.submit = jest.fn();
const value = [{ type: 'Project', value: { data: 1, operator: '=' } }];
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
const getAvailableTokens = () => findFilteredSearch().props('availableTokens');
const getAvailableTokenProps = type =>
......@@ -19,9 +18,6 @@ describe('AuditEventsFilter', () => {
propsData: {
...props,
},
methods: {
getFormElement: () => formElement,
},
});
};
......@@ -46,74 +42,59 @@ describe('AuditEventsFilter', () => {
});
});
describe('when the URL query has a search term', () => {
const type = 'User';
const id = '1';
describe('when the default token value is set', () => {
beforeEach(() => {
delete window.location;
window.location = { search: `entity_type=${type}&entity_id=${id}` };
initComponent();
initComponent({ value });
});
it('sets the filtered searched token', () => {
expect(findFilteredSearch().props('value')).toMatchObject([
{
type,
value: {
data: id,
},
},
]);
expect(findFilteredSearch().props('value')).toEqual(value);
});
});
describe('when the URL query is empty', () => {
beforeEach(() => {
delete window.location;
window.location = { search: '' };
initComponent();
it('only one token matching the selected token type is enabled', () => {
expect(getAvailableTokenProps('Project').disabled).toEqual(false);
expect(getAvailableTokenProps('Group').disabled).toEqual(true);
expect(getAvailableTokenProps('User').disabled).toEqual(true);
});
it('has an empty search value', () => {
expect(findFilteredSearch().vm.value).toEqual([]);
describe('and the user submits the search field', () => {
beforeEach(() => {
findFilteredSearch().vm.$emit('submit');
});
it('should emit the "submit" event', () => {
expect(wrapper.emitted().submit).toHaveLength(1);
});
});
});
describe('when submitting the filtered search', () => {
describe('when the default token value is not set', () => {
beforeEach(() => {
initComponent();
findFilteredSearch().vm.$emit('submit');
});
it("calls submit on this component's FORM element", () => {
expect(formElement.submit).toHaveBeenCalledWith();
it('has an empty search value', () => {
expect(findFilteredSearch().vm.value).toEqual([]);
});
});
describe('when a search token has been selected', () => {
const searchTerm = {
value: { data: '1' },
type: 'Project',
};
beforeEach(() => {
initComponent();
wrapper.setData({
searchTerms: [searchTerm],
describe('and the user inputs nothing into the search field', () => {
beforeEach(() => {
findFilteredSearch().vm.$emit('input', []);
});
});
it('only one token matching the selected type is available', () => {
expect(getAvailableTokenProps('Project').disabled).toEqual(false);
expect(getAvailableTokenProps('Group').disabled).toEqual(true);
expect(getAvailableTokenProps('User').disabled).toEqual(true);
});
it('should emit the "selected" event with empty values', () => {
expect(wrapper.emitted().selected[0]).toEqual([[]]);
});
describe('and the user submits the search field', () => {
beforeEach(() => {
findFilteredSearch().vm.$emit('submit');
});
it('sets the input values according to the search term', () => {
expect(wrapper.find('input[name="entity_type"]').attributes().value).toEqual(searchTerm.type);
expect(wrapper.find('input[name="entity_id"]').attributes().value).toEqual(
searchTerm.value.data,
);
it('should emit the "submit" event', () => {
expect(wrapper.emitted().submit).toHaveLength(1);
});
});
});
});
......
......@@ -5,81 +5,61 @@ import DateRangeField from 'ee/audit_events/components/date_range_field.vue';
import { parsePikadayDate } from '~/lib/utils/datetime_utility';
describe('DateRangeField component', () => {
const DATE = '1970-01-01';
let wrapper;
const createComponent = (props = {}) => {
const formElement = document.createElement('form');
document.body.appendChild(formElement);
const startDate = parsePikadayDate('2020-03-13');
const endDate = parsePikadayDate('2020-03-14');
return shallowMount(DateRangeField, {
propsData: { formElement, ...props },
const createComponent = (props = {}) => {
wrapper = shallowMount(DateRangeField, {
propsData: { ...props },
});
};
beforeEach(() => {
delete window.location;
window.location = { search: '' };
});
afterEach(() => {
document.querySelector('form').remove();
wrapper.destroy();
wrapper = null;
});
it('should populate the initial start date if passed in the query string', () => {
window.location.search = `?created_after=${DATE}`;
wrapper = createComponent();
it('passes the startDate to the date picker as defaultStartDate', () => {
createComponent({ startDate });
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: parsePikadayDate(DATE),
defaultStartDate: startDate,
defaultEndDate: null,
});
});
it('should populate the initial end date if passed in the query string', () => {
window.location.search = `?created_before=${DATE}`;
wrapper = createComponent();
it('passes the endDate to the date picker as defaultEndDate', () => {
createComponent({ endDate });
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: null,
defaultEndDate: parsePikadayDate(DATE),
defaultEndDate: endDate,
});
});
it('should populate both the initial start and end dates if passed in the query string', () => {
window.location.search = `?created_after=${DATE}&created_before=${DATE}`;
wrapper = createComponent();
it('passes both startDate and endDate to the date picker as default dates', () => {
createComponent({ startDate, endDate });
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: parsePikadayDate(DATE),
defaultEndDate: parsePikadayDate(DATE),
defaultStartDate: startDate,
defaultEndDate: endDate,
});
});
it('should populate the date hidden fields on input', () => {
wrapper = createComponent();
wrapper
.find(GlDaterangePicker)
.vm.$emit('input', { startDate: parsePikadayDate(DATE), endDate: parsePikadayDate(DATE) });
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find('input[name="created_after"]').attributes().value).toEqual(DATE);
expect(wrapper.find('input[name="created_before"]').attributes().value).toEqual(DATE);
});
});
it('should submit the form on input change', () => {
wrapper = createComponent();
const spy = jest.spyOn(wrapper.props().formElement, 'submit');
wrapper
.find(GlDaterangePicker)
.vm.$emit('input', { startDate: parsePikadayDate(DATE), endDate: parsePikadayDate(DATE) });
return wrapper.vm.$nextTick().then(() => {
expect(spy).toHaveBeenCalledTimes(1);
it('should emit the "selected" event with startDate and endDate on input change', () => {
createComponent();
wrapper.find(GlDaterangePicker).vm.$emit('input', { startDate, endDate });
return wrapper.vm.$nextTick(() => {
expect(wrapper.emitted().selected).toBeTruthy();
expect(wrapper.emitted().selected[0]).toEqual([
{
startDate,
endDate,
},
]);
});
});
});
import { shallowMount } from '@vue/test-utils';
import { GlNewDropdownItem } from '@gitlab/ui';
import * as urlUtils from '~/lib/utils/url_utility';
import SortingField from 'ee/audit_events/components/sorting_field.vue';
describe('SortingField component', () => {
let wrapper;
const DUMMY_URL = 'https://localhost';
const createComponent = () =>
shallowMount(SortingField, { stubs: { GlNewDropdown: true, GlNewDropdownItem: true } });
const initComponent = (props = {}) => {
wrapper = shallowMount(SortingField, {
propsData: { ...props },
stubs: {
GlNewDropdown: true,
GlNewDropdownItem: true,
},
});
};
const getCheckedOptions = () =>
wrapper.findAll(GlNewDropdownItem).filter(item => item.props().isChecked);
const getCheckedOptionHref = () => {
return getCheckedOptions()
.at(0)
.attributes().href;
};
beforeEach(() => {
urlUtils.setUrlParams = jest.fn(({ sort }) => `${DUMMY_URL}/?sort=${sort}`);
wrapper = createComponent();
initComponent();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('Sorting behaviour', () => {
describe('when initialized', () => {
it('should have sorting options', () => {
expect(wrapper.findAll(GlNewDropdownItem)).toHaveLength(2);
});
it('should set the sorting option to `created_desc` by default', () => {
expect(getCheckedOptions()).toHaveLength(1);
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?sort=created_desc`);
});
it('should get the sorting option from the URL', () => {
urlUtils.queryToObject = jest.fn(() => ({ sort: 'created_asc' }));
wrapper = createComponent();
describe('with a sortBy value', () => {
beforeEach(() => {
initComponent({
sortBy: 'created_asc',
});
});
expect(getCheckedOptions()).toHaveLength(1);
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?sort=created_asc`);
it('should set the sorting option accordingly', () => {
expect(getCheckedOptions()).toHaveLength(1);
expect(
getCheckedOptions()
.at(0)
.text(),
).toEqual('Oldest created');
});
});
});
it('should retain other params when creating the option URL', () => {
urlUtils.setUrlParams = jest.fn(({ sort }) => `${DUMMY_URL}/?abc=defg&sort=${sort}`);
urlUtils.queryToObject = jest.fn(() => ({ sort: 'created_desc', abc: 'defg' }));
wrapper = createComponent();
describe('when the user clicks on a option', () => {
beforeEach(() => {
initComponent();
wrapper
.findAll(GlNewDropdownItem)
.at(1)
.vm.$emit('click');
});
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?abc=defg&sort=created_desc`);
it('should emit the "selected" event with clicked option', () => {
expect(wrapper.emitted().selected).toBeTruthy();
expect(wrapper.emitted().selected[0]).toEqual(['created_asc']);
});
});
});
......@@ -18,10 +18,9 @@ describe('Audit Event actions', () => {
});
it.each`
action | type | payload
${'setDateRange'} | ${types.SET_DATE_RANGE} | ${{ startDate, endDate }}
${'setFilterValue'} | ${types.SET_FILTER_VALUE} | ${{ id: '1', type: 'user' }}
${'setSortBy'} | ${types.SET_SORT_BY} | ${'created_asc'}
action | type | payload
${'setDateRange'} | ${types.SET_DATE_RANGE} | ${{ startDate, endDate }}
${'setSortBy'} | ${types.SET_SORT_BY} | ${'created_asc'}
`(
'$action should commit $type with $payload and dispatches "searchForAuditEvents"',
({ action, type, payload }) => {
......@@ -40,6 +39,11 @@ describe('Audit Event actions', () => {
},
);
it('setFilterValue action should commit to the store', () => {
const payload = [{ type: 'User', value: { data: 1, operator: '=' } }];
testAction(actions.setFilterValue, payload, state, [{ type: types.SET_FILTER_VALUE, payload }]);
});
describe('searchForAuditEvents', () => {
let spy;
......
......@@ -15,10 +15,10 @@ describe('Audit Event mutations', () => {
});
it.each`
mutation | payload | expectedState
${types.SET_FILTER_VALUE} | ${{ id: '1', type: 'user' }} | ${{ filterValue: { id: '1', type: 'user' } }}
${types.SET_DATE_RANGE} | ${{ startDate, endDate }} | ${{ startDate, endDate }}
${types.SET_SORT_BY} | ${'created_asc'} | ${{ sortBy: 'created_asc' }}
mutation | payload | expectedState
${types.SET_FILTER_VALUE} | ${[{ type: 'User', value: { data: 1, operator: '=' } }]} | ${{ filterValue: [{ type: 'User', value: { data: 1, operator: '=' } }] }}
${types.SET_DATE_RANGE} | ${{ startDate, endDate }} | ${{ startDate, endDate }}
${types.SET_SORT_BY} | ${'created_asc'} | ${{ sortBy: 'created_asc' }}
`(
'$mutation with payload $payload will update state with $expectedState',
({ mutation, payload, expectedState }) => {
......@@ -32,7 +32,7 @@ describe('Audit Event mutations', () => {
describe(`${types.INITIALIZE_AUDIT_EVENTS}`, () => {
const payload = {
entity_id: '1',
entity_type: 'user',
entity_type: 'User',
created_after: startDate,
created_before: endDate,
sort: 'created_asc',
......@@ -40,7 +40,7 @@ describe('Audit Event mutations', () => {
it.each`
stateKey | expectedState
${'filterValue'} | ${{ id: payload.entity_id, type: payload.entity_type }}
${'filterValue'} | ${[{ type: payload.entity_type, value: { data: payload.entity_id, operator: '=' } }]}
${'startDate'} | ${payload.created_after}
${'endDate'} | ${payload.created_before}
${'sortBy'} | ${payload.sort}
......
......@@ -8,6 +8,7 @@ describe('Audit Event Utils', () => {
created_before: '2020-04-13',
sortBy: 'created_asc',
};
expect(parseAuditEventSearchQuery(input)).toEqual({
created_after: new Date('2020-03-13'),
created_before: new Date('2020-04-13'),
......@@ -19,20 +20,19 @@ describe('Audit Event Utils', () => {
describe('createAuditEventSearchQuery', () => {
it('returns a query object with remapped keys and stringified dates', () => {
const input = {
filterValue: {
id: '1',
type: 'user',
},
filterValue: [{ type: 'User', value: { data: '1', operator: '=' } }],
startDate: new Date('2020-03-13'),
endDate: new Date('2020-04-13'),
sortBy: 'bar',
};
expect(createAuditEventSearchQuery(input)).toEqual({
entity_id: '1',
entity_type: 'user',
entity_type: 'User',
created_after: '2020-03-13',
created_before: '2020-04-13',
sort: 'bar',
page: null,
});
});
});
......
......@@ -74,8 +74,8 @@ describe('Vulnerability list component', () => {
});
it('should not show the checkboxes if shouldShowSelection is passed in', () => {
expect(findCheckAllCheckboxCell().classes()).toContain('d-none');
expect(findFirstCheckboxCell().classes()).toContain('d-none');
expect(findCheckAllCheckboxCell().classes()).toContain('gl-display-none');
expect(findFirstCheckboxCell().classes()).toContain('gl-display-none');
});
});
......
......@@ -18,9 +18,13 @@ RSpec.describe Gitlab::Auth::GroupSaml::FailureHandler do
'omniauth.error.strategy' => strategy,
'devise.mapping' => Devise.mappings[:user],
'warden' => warden,
'action_dispatch.key_generator' => ActiveSupport::KeyGenerator.new('b2efbaccbdb9548217eebc73a896db73'), # necessary for setting signed cookies in lib/gitlab/experimentation.rb
'action_dispatch.signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3', # necessary for setting signed cookies in lib/gitlab/experimentation.rb
'action_dispatch.cookies_rotations' => OpenStruct.new(signed: []) # necessary for setting signed cookies in lib/gitlab/experimentation.rb
# The following are necessary for setting signed/encrypted cookies such as in
# lib/gitlab/experimentation.rb or app/controllers/concerns/known_sign_in.rb
'action_dispatch.key_generator' => ActiveSupport::KeyGenerator.new('b2efbaccbdb9548217eebc73a896db73'),
'action_dispatch.signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.encrypted_signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.encrypted_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.cookies_rotations' => OpenStruct.new(signed: [], encrypted: [])
}
Rack::MockRequest.env_for(path, params)
end
......
......@@ -2,9 +2,11 @@
require 'spec_helper'
RSpec.describe Geo::DeletedProject, type: :model do
RSpec.describe Geo::DeletedProject, :geo, type: :model do
include StubConfiguration
subject { described_class.new(id: 1, name: 'sample', disk_path: 'root/sample', repository_storage: 'foo') }
before do
storages = {
'foo' => { 'path' => 'tmp/tests/storage_foo' },
......@@ -14,11 +16,23 @@ RSpec.describe Geo::DeletedProject, type: :model do
stub_storage_settings(storages)
end
subject { described_class.new(id: 1, name: 'sample', disk_path: 'root/sample', repository_storage: 'foo') }
describe 'attributes' do
it { is_expected.to respond_to(:id) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:disk_path) }
end
it { is_expected.to respond_to(:id) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:disk_path) }
describe 'validations' do
it { is_expected.to validate_presence_of(:id) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:disk_path) }
end
describe 'attributes' do
it { is_expected.to respond_to(:id) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:disk_path) }
end
describe '#full_path' do
it 'is an alias for disk_path' do
......
......@@ -25,6 +25,154 @@ RSpec.describe Geo::ProjectRegistry, :geo_fdw do
it { is_expected.to validate_uniqueness_of(:project) }
end
describe '.find_registry_differences' do
let!(:secondary) { create(:geo_node) }
let!(:synced_group) { create(:group) }
let!(:nested_group) { create(:group, parent: synced_group) }
let!(:project_1) { create(:project, group: synced_group) }
let!(:project_2) { create(:project, group: nested_group) }
let!(:project_3) { create(:project) }
let!(:project_4) { create(:project) }
let!(:project_5) { create(:project, :broken_storage) }
let!(:project_6) { create(:project, :broken_storage) }
before do
stub_current_geo_node(secondary)
end
context 'untracked IDs' do
before do
create(:geo_project_registry, project_id: project_1.id)
create(:geo_project_registry, :sync_failed, project_id: project_3.id)
create(:geo_project_registry, project_id: project_5.id)
end
it 'includes project IDs without an entry on the tracking database' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_2.id, project_4.id, project_6.id])
end
it 'excludes projects outside the ID range' do
untracked_ids, _ = described_class.find_registry_differences(project_4.id..project_6.id)
expect(untracked_ids).to match_array([project_4.id, project_6.id])
end
context 'with selective sync by namespace' do
let(:secondary) { create(:geo_node, selective_sync_type: 'namespaces', namespaces: [synced_group]) }
it 'excludes project IDs that are not in selectively synced projects' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_2.id])
end
end
context 'with selective sync by shard' do
let(:secondary) { create(:geo_node, selective_sync_type: 'shards', selective_sync_shards: ['broken']) }
it 'excludes project IDs that are not in selectively synced projects' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_6.id])
end
end
end
context 'unused tracked IDs' do
context 'with an orphaned registry' do
let!(:orphaned) { create(:geo_project_registry, project_id: project_1.id) }
before do
project_1.delete
end
it 'includes tracked IDs that do not exist in the model table' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_1.id])
end
it 'excludes IDs outside the ID range' do
range = (project_1.id + 1)..Project.maximum(:id)
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
context 'with selective sync by namespace' do
let(:secondary) { create(:geo_node, selective_sync_type: 'namespaces', namespaces: [synced_group]) }
context 'with a tracked project' do
context 'excluded from selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_3.id) }
it 'includes tracked project IDs that exist but are not in a selectively synced project' do
range = project_3.id..project_3.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_3.id])
end
end
context 'included in selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_1.id) }
it 'excludes tracked project IDs that are in selectively synced projects' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
end
end
context 'with selective sync by shard' do
let(:secondary) { create(:geo_node, selective_sync_type: 'shards', selective_sync_shards: ['broken']) }
context 'with a tracked project' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_1.id) }
context 'excluded from selective sync' do
it 'includes tracked project IDs that exist but are not in a selectively synced project' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_1.id])
end
end
context 'included in selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_5.id) }
it 'excludes tracked project IDs that are in selectively synced projects' do
range = project_5.id..project_5.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
end
end
end
end
describe '.synced_repos' do
it 'returns clean projects where last attempt to sync succeeded' do
expected = []
......
......@@ -141,6 +141,34 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
context 'maven package' do
subject { create(:maven_package) }
it { is_expected.to allow_value('0').for(:version) }
it { is_expected.to allow_value('1').for(:version) }
it { is_expected.to allow_value('10').for(:version) }
it { is_expected.to allow_value('1.0').for(:version) }
it { is_expected.to allow_value('1.3.350.v20200505-1744').for(:version) }
it { is_expected.to allow_value('1.1-beta-2').for(:version) }
it { is_expected.to allow_value('1.2-SNAPSHOT').for(:version) }
it { is_expected.to allow_value('12.1.2-2-1').for(:version) }
it { is_expected.to allow_value('1.2.3..beta').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
it { is_expected.to allow_value('10.2.3-beta').for(:version) }
it { is_expected.to allow_value('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq').for(:version) }
it { is_expected.to allow_value('1.2-alpha-1-20050205.060708-1').for(:version) }
it { is_expected.to allow_value('703220b4e2cea9592caeb9f3013f6b1e5335c293').for(:version) }
it { is_expected.to allow_value('RELEASE').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
it_behaves_like 'validating version to be SemVer compliant for', :npm_package
it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
end
......
......@@ -46,6 +46,10 @@ RSpec.describe Geo::RegistryConsistencyService, :geo, :use_clean_rails_memory_st
expect(registry_class).to respond_to(:delete_for_model_ids)
end
it 'responds to .find_registry_differences' do
expect(registry_class).to respond_to(:find_registry_differences)
end
it 'responds to .has_create_events?' do
expect(registry_class).to respond_to(:has_create_events?)
end
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Geo::RepositoryDestroyService do
RSpec.describe Geo::RepositoryDestroyService, :geo do
include ::EE::GeoHelpers
let_it_be(:secondary) { create(:geo_node) }
......@@ -128,5 +128,49 @@ RSpec.describe Geo::RepositoryDestroyService do
expect(Geo::DesignRegistry.where(project: project)).to be_empty
end
end
context 'with an unused registry' do
let!(:project) { create(:project_empty_repo, :legacy_storage) }
let!(:unused_project_registry) { create(:geo_project_registry, project_id: project.id) }
let!(:unused_design_registry) { create(:geo_design_registry, project_id: project.id) }
subject(:service) { described_class.new(project.id) }
context 'when the replicable model does not exist' do
before do
project.delete
end
it 'does not delegate project removal to Projects::DestroyService' do
expect_any_instance_of(EE::Projects::DestroyService).not_to receive(:geo_replicate)
service.execute
end
it 'removes the registry entries' do
service.execute
expect(Geo::ProjectRegistry.where(project: project)).to be_empty
expect(Geo::DesignRegistry.where(project: project)).to be_empty
end
end
context 'when the replicable model exists' do
subject(:service) { described_class.new(project.id) }
it 'delegates project removal to Projects::DestroyService' do
expect_any_instance_of(EE::Projects::DestroyService).to receive(:geo_replicate)
service.execute
end
it 'removes the registry entries' do
service.execute
expect(Geo::ProjectRegistry.where(project: project)).to be_empty
expect(Geo::DesignRegistry.where(project: project)).to be_empty
end
end
end
end
end
......@@ -76,13 +76,15 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
# Somewhat of an integration test
it 'creates missing registries for each registry class' do
lfs_object = create(:lfs_object)
job_artifact = create(:ci_job_artifact)
lfs_object = create(:lfs_object)
project = create(:project)
upload = create(:upload)
package_file = create(:conan_package_file, :conan_package)
expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(0)
expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(0)
expect(Geo::ProjectRegistry.where(project_id: project.id).count).to eq(0)
expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(0)
expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(0)
......@@ -90,13 +92,12 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(1)
expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(1)
expect(Geo::ProjectRegistry.where(project_id: project.id).count).to eq(1)
expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(1)
expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(1)
end
context 'when geo_file_registry_ssot_sync is disabled' do
let_it_be(:upload) { create(:upload) }
before do
stub_feature_flags(geo_file_registry_ssot_sync: false)
end
......@@ -109,6 +110,7 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::JobArtifactRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::LfsObjectRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::PackageFileRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::ProjectRegistry, batch_size: 1000).and_call_original
expect(Geo::RegistryConsistencyService).not_to receive(:new).with(Geo::UploadRegistry, batch_size: 1000)
......@@ -116,6 +118,27 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
end
end
context 'when geo_project_registry_ssot_sync is disabled' do
before do
stub_feature_flags(geo_project_registry_ssot_sync: false)
end
it 'returns false' do
expect(subject.perform).to be_falsey
end
it 'does not execute RegistryConsistencyService for projects' do
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::JobArtifactRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::LfsObjectRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::PackageFileRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::UploadRegistry, batch_size: 1000).and_call_original
expect(Geo::RegistryConsistencyService).not_to receive(:new).with(Geo::ProjectRegistry, batch_size: 1000)
subject.perform
end
end
context 'when the current Geo node is disabled or primary' do
before do
stub_primary_node
......
......@@ -2,16 +2,31 @@
require 'spec_helper'
RSpec.describe GeoRepositoryDestroyWorker do
RSpec.describe GeoRepositoryDestroyWorker, :geo do
describe '#perform' do
it 'delegates project removal to Geo::RepositoryDestroyService' do
project = create(:project)
let(:project) { create(:project) }
expect_next_instance_of(Geo::RepositoryDestroyService) do |instance|
expect(instance).to receive(:execute)
context 'with an existing project' do
it 'delegates project removal to Geo::RepositoryDestroyService' do
expect_next_instance_of(Geo::RepositoryDestroyService) do |instance|
expect(instance).to receive(:execute)
end
subject.perform(project.id, project.name, project.path, 'default')
end
end
context 'with project ID from an orphaned registry' do
it 'delegates project removal to Geo::RepositoryDestroyService' do
registry = create(:geo_project_registry, project_id: project.id)
project.delete
described_class.new.perform(project.id, project.name, project.path, 'default')
expect_next_instance_of(Geo::RepositoryDestroyService) do |instance|
expect(instance).to receive(:execute)
end
subject.perform(registry.project_id)
end
end
end
end
......@@ -43,6 +43,10 @@ module Gitlab
@maven_app_name_regex ||= /\A[\w\-\.]+\z/.freeze
end
def maven_version_regex
@maven_version_regex ||= /\A(\.?[\w\+-]+\.?)+\z/.freeze
end
def maven_app_group_regex
maven_app_name_regex
end
......
......@@ -972,6 +972,9 @@ msgstr ""
msgid "A Let's Encrypt account will be configured for this GitLab installation using your email address. You will receive emails to warn of expiring certificates."
msgstr ""
msgid "A Terraform report was generated in your pipelines."
msgstr ""
msgid "A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages"
msgstr ""
......@@ -1059,9 +1062,6 @@ msgstr ""
msgid "A suggestion is not applicable."
msgstr ""
msgid "A terraform report was generated in your pipelines."
msgstr ""
msgid "A user with write access to the source branch selected this option"
msgstr ""
......@@ -2392,9 +2392,6 @@ msgstr ""
msgid "An error occurred while loading project creation UI"
msgstr ""
msgid "An error occurred while loading terraform report"
msgstr ""
msgid "An error occurred while loading the data. Please try again."
msgstr ""
......@@ -4054,9 +4051,6 @@ msgstr ""
msgid "Changes are still tracked. Useful for cluster/index migrations."
msgstr ""
msgid "Changes are unknown"
msgstr ""
msgid "Changes suppressed. Click to show."
msgstr ""
......@@ -10187,6 +10181,9 @@ msgstr ""
msgid "Generate new export"
msgstr ""
msgid "Generating the report caused an error."
msgstr ""
msgid "Geo"
msgstr ""
......@@ -22235,6 +22232,9 @@ msgstr ""
msgid "The Prometheus server responded with \"bad request\". Please check your queries are correct and are supported in your Prometheus version. %{documentationLink}"
msgstr ""
msgid "The Terraform report %{name} was generated in your pipelines."
msgstr ""
msgid "The URL defined on the primary node that secondary nodes should use to contact it. Defaults to URL"
msgstr ""
......
......@@ -75,7 +75,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do
subject
expect(cookies['issue_sort']).to eq(value: 'popularity', secure: false, httponly: false)
expect(cookies['issue_sort']).to eq(expires: nil, value: 'popularity', secure: false, httponly: false)
end
end
......@@ -86,7 +86,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do
subject
expect(cookies['issue_sort']).to eq(value: 'created_asc', secure: false, httponly: false)
expect(cookies['issue_sort']).to eq(expires: nil, value: 'created_asc', secure: false, httponly: false)
end
end
end
......
......@@ -91,6 +91,17 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
end
shared_examples "diff note on-demand position creation" do
it "updates diff discussion positions" do
service = double("service")
expect(Discussions::CaptureDiffNotePositionsService).to receive(:new).with(merge_request).and_return(service)
expect(service).to receive(:execute)
go
end
end
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
......@@ -146,6 +157,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'persisted preferred diff view cookie'
it_behaves_like 'cached diff collection'
it_behaves_like 'diff note on-demand position creation'
end
describe 'GET diffs_metadata' do
......
......@@ -10,6 +10,7 @@ import {
addDashboardMetaDataToLink,
normalizeCustomDashboardPath,
} from '~/monitoring/stores/utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
......@@ -399,6 +400,118 @@ describe('mapToDashboardViewModel', () => {
});
});
});
describe('templating variables mapping', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('sets variables as-is from yml file if URL has no variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce();
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('sets variables as-is from yml file if URL has no matching variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('merges variables from URL with the ones from yml file', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
'var-pod': 'POD1',
'var-pod_2': 'POD2',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'POD1',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'POD2',
},
},
});
});
});
});
describe('uniqMetricsId', () => {
......
import { parseTemplatingVariables } from '~/monitoring/stores/variable_mapping';
import { parseTemplatingVariables, mergeURLVariables } from '~/monitoring/stores/variable_mapping';
import * as urlUtils from '~/lib/utils/url_utility';
import { mockTemplatingData, mockTemplatingDataResponses } from '../mock_data';
describe('parseTemplatingVariables', () => {
......@@ -21,3 +22,73 @@ describe('parseTemplatingVariables', () => {
expect(parseTemplatingVariables(input?.dashboard?.templating)).toEqual(expected);
});
});
describe('mergeURLVariables', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('returns empty object if variables are not defined in yml or URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
expect(mergeURLVariables({})).toEqual({});
});
it('returns empty object if variables are defined in URL but not in yml', () => {
urlUtils.queryToObject.mockReturnValueOnce({
'var-env': 'one',
'var-instance': 'localhost',
});
expect(mergeURLVariables({})).toEqual({});
});
it('returns yml variables if variables defined in yml but not in the URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
const params = {
env: 'one',
instance: 'localhost',
};
expect(mergeURLVariables(params)).toEqual(params);
});
it('returns yml variables if variables defined in URL do not match with yml variables', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost',
};
const ymlParams = {
pod: { value: 'one' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(ymlParams);
});
it('returns merged yml and URL variables if there is some match', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost:8080',
};
const ymlParams = {
instance: { value: 'localhost' },
service: { value: 'database' },
};
const merged = {
instance: { value: 'localhost:8080' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(merged);
});
});
......@@ -169,8 +169,8 @@ describe('monitoring/utils', () => {
});
});
describe('getPromCustomVariablesFromUrl', () => {
const { getPromCustomVariablesFromUrl } = monitoringUtils;
describe('templatingVariablesFromUrl', () => {
const { templatingVariablesFromUrl } = monitoringUtils;
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
......@@ -195,7 +195,7 @@ describe('monitoring/utils', () => {
'var-pod': 'POD',
});
expect(getPromCustomVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' }));
expect(templatingVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' }));
});
it('returns an empty object when no custom variables are present', () => {
......@@ -203,7 +203,7 @@ describe('monitoring/utils', () => {
dashboard: '.gitlab/dashboards/custom_dashboard.yml',
});
expect(getPromCustomVariablesFromUrl()).toStrictEqual({});
expect(templatingVariablesFromUrl()).toStrictEqual({});
});
});
......@@ -427,76 +427,6 @@ describe('monitoring/utils', () => {
});
});
describe('mergeURLVariables', () => {
  const { mergeURLVariables } = monitoringUtils;

  beforeEach(() => {
    jest.spyOn(urlUtils, 'queryToObject');
  });

  afterEach(() => {
    urlUtils.queryToObject.mockRestore();
  });

  it('returns empty object if variables are not defined in yml or URL', () => {
    urlUtils.queryToObject.mockReturnValueOnce({});

    expect(mergeURLVariables({})).toEqual({});
  });

  it('returns empty object if variables are defined in URL but not in yml', () => {
    // URL params without yml counterparts must be ignored entirely.
    urlUtils.queryToObject.mockReturnValueOnce({
      'var-env': 'one',
      'var-instance': 'localhost',
    });

    expect(mergeURLVariables({})).toEqual({});
  });

  it('returns yml variables if variables defined in yml but not in the URL', () => {
    urlUtils.queryToObject.mockReturnValueOnce({});

    const fromYml = {
      env: 'one',
      instance: 'localhost',
    };

    expect(mergeURLVariables(fromYml)).toEqual(fromYml);
  });

  it('returns yml variables if variables defined in URL do not match with yml variables', () => {
    const fromUrl = {
      'var-env': 'one',
      'var-instance': 'localhost',
    };
    const fromYml = {
      pod: { value: 'one' },
      service: { value: 'database' },
    };

    urlUtils.queryToObject.mockReturnValueOnce(fromUrl);

    expect(mergeURLVariables(fromYml)).toEqual(fromYml);
  });

  it('returns merged yml and URL variables if there is some match', () => {
    // Only `instance` appears in both sources; the URL value wins for it.
    const fromUrl = {
      'var-env': 'one',
      'var-instance': 'localhost:8080',
    };
    const fromYml = {
      instance: { value: 'localhost' },
      service: { value: 'database' },
    };
    const expectedMerge = {
      instance: { value: 'localhost:8080' },
      service: { value: 'database' },
    };

    urlUtils.queryToObject.mockReturnValueOnce(fromUrl);

    expect(mergeURLVariables(fromYml)).toEqual(expectedMerge);
  });
});
describe('convertVariablesForURL', () => {
it.each`
input | expected
......
// Terraform plan fixtures shared by the MR-widget terraform specs.

// A plan missing every expected field; exercises the error/fallback UI.
export const invalidPlan = {};

// A fully populated plan, including the job metadata used for the log link.
export const validPlan = {
  create: 10,
  update: 20,
  delete: 30,
  job_name: 'Plan Changes',
  job_path: '/path/to/ci/logs/1',
};

// Another complete plan, kept separate so `plans` mixes valid and invalid entries.
const thirdPlan = {
  create: 1,
  update: 2,
  delete: 3,
  job_name: 'Plan 3',
  job_path: '/path/to/ci/logs/3',
};

// Keyed by report id, mirroring the shape returned by the polling endpoint.
export const plans = {
  '1': validPlan,
  '2': invalidPlan,
  '3': thirdPlan,
};
import { GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { GlSkeletonLoading } from '@gitlab/ui';
import { plans } from './mock_data';
import { shallowMount } from '@vue/test-utils';
import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import MrWidgetTerraformPlan from '~/vue_merge_request_widget/components/mr_widget_terraform_plan.vue';
import MrWidgetTerraformContainer from '~/vue_merge_request_widget/components/terraform/mr_widget_terraform_container.vue';
import Poll from '~/lib/utils/poll';
import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
// Terraform report fixture served by the mocked polling endpoint.
// NOTE(review): deliberately lacks `job_name` — presumably to exercise the
// generic-header fallback; confirm against the terraform_plan component spec.
const plan = {
  create: 10,
  update: 20,
  delete: 30,
  job_path: '/path/to/ci/logs',
};
describe('MrWidgetTerraformPlan', () => {
describe('MrWidgetTerraformConainer', () => {
let mock;
let wrapper;
const propsData = { endpoint: '/path/to/terraform/report.json' };
const findPlans = () => wrapper.findAll(TerraformPlan).wrappers.map(x => x.props('plan'));
const mockPollingApi = (response, body, header) => {
mock.onGet(propsData.endpoint).reply(response, body, header);
};
const mountWrapper = () => {
wrapper = shallowMount(MrWidgetTerraformPlan, { propsData });
wrapper = shallowMount(MrWidgetTerraformContainer, { propsData });
return axios.waitForAll();
};
......@@ -36,9 +33,9 @@ describe('MrWidgetTerraformPlan', () => {
mock.restore();
});
describe('loading poll', () => {
describe('when data is loading', () => {
beforeEach(() => {
mockPollingApi(200, { '123': plan }, {});
mockPollingApi(200, plans, {});
return mountWrapper().then(() => {
wrapper.setData({ loading: true });
......@@ -46,28 +43,20 @@ describe('MrWidgetTerraformPlan', () => {
});
});
it('Diplays loading icon when loading is true', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.find(GlSprintf).exists()).toBe(false);
it('diplays loading skeleton', () => {
expect(wrapper.find(GlSkeletonLoading).exists()).toBe(true);
expect(wrapper.text()).not.toContain(
'A terraform report was generated in your pipelines. Changes are unknown',
);
expect(findPlans()).toEqual([]);
});
});
describe('successful poll', () => {
describe('polling', () => {
let pollRequest;
let pollStop;
beforeEach(() => {
pollRequest = jest.spyOn(Poll.prototype, 'makeRequest');
pollStop = jest.spyOn(Poll.prototype, 'stop');
mockPollingApi(200, { '123': plan }, {});
return mountWrapper();
});
afterEach(() => {
......@@ -75,33 +64,43 @@ describe('MrWidgetTerraformPlan', () => {
pollStop.mockRestore();
});
it('content change text', () => {
expect(wrapper.find(GlSprintf).exists()).toBe(true);
});
describe('successful poll', () => {
beforeEach(() => {
mockPollingApi(200, plans, {});
it('renders button when url is found', () => {
expect(wrapper.find(GlLink).exists()).toBe(true);
});
return mountWrapper();
});
it('does not make additional requests after poll is successful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
expect(pollStop).toHaveBeenCalledTimes(1);
});
});
it('diplays terraform components and stops loading', () => {
expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
describe('polling fails', () => {
beforeEach(() => {
mockPollingApi(500, null, {});
return mountWrapper();
expect(findPlans()).toEqual(Object.values(plans));
});
it('does not make additional requests after poll is successful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
expect(pollStop).toHaveBeenCalledTimes(1);
});
});
it('does not display changes text when api fails', () => {
expect(wrapper.text()).toContain(
'A terraform report was generated in your pipelines. Changes are unknown',
);
describe('polling fails', () => {
beforeEach(() => {
mockPollingApi(500, null, {});
return mountWrapper();
});
it('stops loading', () => {
expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
});
expect(wrapper.find('.js-terraform-report-link').exists()).toBe(false);
expect(wrapper.find(GlLink).exists()).toBe(false);
it('generates one broken plan', () => {
expect(findPlans()).toEqual([{}]);
});
it('does not make additional requests after poll is unsuccessful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
expect(pollStop).toHaveBeenCalledTimes(1);
});
});
});
});
import { invalidPlan, validPlan } from './mock_data';
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
// Specs for the TerraformPlan presentational component: renders a summary
// of a terraform report and, when available, a link to the full job log.
describe('TerraformPlan', () => {
  let wrapper;

  const findLogButton = () => wrapper.find('.js-terraform-report-link');

  // GlLink/GlSprintf are unstubbed so link text and interpolated
  // messages actually render for the text assertions below.
  const mountWrapper = propsData => {
    wrapper = shallowMount(TerraformPlan, { stubs: { GlLink, GlSprintf }, propsData });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  describe('validPlan', () => {
    beforeEach(() => {
      mountWrapper({ plan: validPlan });
    });

    // Fixed typo in the description: "diplays" -> "displays".
    it('displays the plan job_name', () => {
      expect(wrapper.text()).toContain(
        `The Terraform report ${validPlan.job_name} was generated in your pipelines.`,
      );
    });

    it('displays the reported changes', () => {
      expect(wrapper.text()).toContain(
        `Reported Resource Changes: ${validPlan.create} to add, ${validPlan.update} to change, ${validPlan.delete} to delete`,
      );
    });

    it('renders button when url is found', () => {
      expect(findLogButton().exists()).toBe(true);
      expect(findLogButton().text()).toEqual('View full log');
    });
  });

  describe('invalidPlan', () => {
    beforeEach(() => {
      mountWrapper({ plan: invalidPlan });
    });

    it('displays generic header since job_name is missing', () => {
      expect(wrapper.text()).toContain('A Terraform report was generated in your pipelines.');
    });

    it('displays generic error since report values are missing', () => {
      expect(wrapper.text()).toContain('Generating the report caused an error.');
    });

    it('does not render button because url is missing', () => {
      expect(findLogButton().exists()).toBe(false);
    });
  });
});
// Builds a minimal ToastMark-style text node carrying the given literal.
const buildMockTextNode = literal => ({
  firstChild: null,
  literal,
  type: 'text',
});
// Builds a minimal ToastMark-style list node (list > item > paragraph > text)
// whose innermost text node carries the given literal.
const buildMockListNode = literal => {
  const textNode = {
    firstChild: null,
    literal,
    type: 'text',
  };
  const paragraphNode = {
    firstChild: textNode,
    type: 'paragraph',
  };
  const itemNode = {
    firstChild: paragraphNode,
    type: 'item',
  };

  return {
    firstChild: itemNode,
    type: 'list',
  };
};
// Special kramdown table-of-contents list (`- TOC`) vs. an ordinary bullet list.
export const kramdownListNode = buildMockListNode('TOC');
export const normalListNode = buildMockListNode('Just another bullet point');
// Kramdown inline-attribute syntax vs. plain prose text.
export const kramdownTextNode = buildMockTextNode('{:toc}');
export const normalTextNode = buildMockTextNode('This is just normal text.');
// Opening wrapper for regions the rich content editor must not let users edit;
// contenteditable is disabled and the styling signals the read-only state.
const uneditableOpenToken = {
  type: 'openTag',
  tagName: 'div',
  attributes: { contenteditable: false },
  classNames: [
    'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
  ],
};

// Matching closing wrapper for the uneditable region.
export const uneditableCloseToken = { type: 'closeTag', tagName: 'div' };

// A sample source token that gets wrapped by the uneditable markers.
export const originToken = {
  type: 'text',
  content: '{:.no_toc .hidden-md .hidden-lg}',
};

// Open marker followed by the wrapped token, and the fully wrapped sequence.
export const uneditableOpenTokens = [uneditableOpenToken, originToken];
export const uneditableTokens = uneditableOpenTokens.concat(uneditableCloseToken);
import buildCustomHTMLRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
describe('Build Custom Renderer Service', () => {
  describe('buildCustomHTMLRenderer', () => {
    it('should return an object with the default renderer functions when lacking arguments', () => {
      const renderer = buildCustomHTMLRenderer();

      // Defaults must always include list and text renderers.
      expect(renderer).toEqual(
        expect.objectContaining({
          list: expect.any(Function),
          text: expect.any(Function),
        }),
      );
    });

    it('should return an object with both custom and default renderer functions when passed customRenderers', () => {
      const renderer = buildCustomHTMLRenderer({ html: [jest.fn()] });

      // The custom html renderer is merged alongside the defaults.
      expect(renderer).toEqual(
        expect.objectContaining({
          html: expect.any(Function),
          list: expect.any(Function),
          text: expect.any(Function),
        }),
      );
    });
  });
});
import {
buildUneditableOpenTokens,
buildUneditableCloseToken,
buildUneditableTokens,
} from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import {
originToken,
uneditableOpenTokens,
uneditableCloseToken,
uneditableTokens,
} from '../../mock_data';
describe('Build Uneditable Token renderer helper', () => {
  describe('buildUneditableOpenTokens', () => {
    it('returns a 2-item array of tokens with the originToken appended to an open token', () => {
      const tokens = buildUneditableOpenTokens(originToken);

      expect(tokens).toHaveLength(2);
      expect(tokens).toStrictEqual(uneditableOpenTokens);
    });
  });

  describe('buildUneditableCloseToken', () => {
    it('returns an object literal representing the uneditable close token', () => {
      const closeToken = buildUneditableCloseToken();

      expect(closeToken).toStrictEqual(uneditableCloseToken);
    });
  });

  describe('buildUneditableTokens', () => {
    it('returns a 3-item array of tokens with the originToken wrapped in the middle', () => {
      const tokens = buildUneditableTokens(originToken);

      expect(tokens).toHaveLength(3);
      expect(tokens).toStrictEqual(uneditableTokens);
    });
  });
});
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list';
import {
buildUneditableOpenTokens,
buildUneditableCloseToken,
} from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import { kramdownListNode, normalListNode } from '../../mock_data';
describe('Render Kramdown List renderer', () => {
  describe('canRender', () => {
    it('should return true when the argument is a special kramdown TOC ordered/unordered list', () => {
      expect(renderer.canRender(kramdownListNode)).toBe(true);
    });

    it('should return false when the argument is a normal ordered/unordered list', () => {
      expect(renderer.canRender(normalListNode)).toBe(false);
    });
  });

  describe('render', () => {
    const origin = jest.fn();

    it('should return uneditable open tokens when entering', () => {
      expect(renderer.render({ entering: true, origin })).toStrictEqual(
        buildUneditableOpenTokens(origin()),
      );
    });

    it('should return an uneditable close tokens when exiting', () => {
      expect(renderer.render({ entering: false, origin })).toStrictEqual(
        buildUneditableCloseToken(origin()),
      );
    });
  });
});
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text';
import { buildUneditableTokens } from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import { kramdownTextNode, normalTextNode } from '../../mock_data';
describe('Render Kramdown Text renderer', () => {
  describe('canRender', () => {
    it('should return true when the argument `literal` has kramdown syntax', () => {
      expect(renderer.canRender(kramdownTextNode)).toBe(true);
    });

    it('should return false when the argument `literal` lacks kramdown syntax', () => {
      expect(renderer.canRender(normalTextNode)).toBe(false);
    });
  });

  describe('render', () => {
    it('should return uneditable tokens', () => {
      const origin = jest.fn();

      expect(renderer.render({ origin })).toStrictEqual(buildUneditableTokens(origin()));
    });
  });
});
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe CookiesHelper do
  describe '#set_secure_cookie' do
    it 'creates an encrypted cookie with expected attributes' do
      # secure: true is expected only when GitLab is configured for HTTPS.
      stub_config_setting(https: true)
      expiration = 1.month.from_now
      key = :secure_cookie
      value = 'secure value'

      # Intercept the next encrypted cookie jar and assert the exact
      # attributes written for the cookie.
      expect_next_instance_of(ActionDispatch::Cookies::EncryptedKeyRotatingCookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: true, secure: true, expires: expiration, value: value)
      end

      helper.set_secure_cookie(key, value, httponly: true, expires: expiration, type: CookiesHelper::COOKIE_TYPE_ENCRYPTED)
    end

    it 'creates a permanent cookie with expected attributes' do
      key = :permanent_cookie
      value = 'permanent value'

      # Permanent cookies go through the permanent jar; without the HTTPS
      # stub above, secure defaults to false here.
      expect_next_instance_of(ActionDispatch::Cookies::PermanentCookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
      end

      helper.set_secure_cookie(key, value, type: CookiesHelper::COOKIE_TYPE_PERMANENT)
    end

    it 'creates a regular cookie with expected attributes' do
      key = :regular_cookie
      value = 'regular value'

      # Default type: the plain cookie jar with no hardening flags set.
      expect_next_instance_of(ActionDispatch::Cookies::CookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
      end

      helper.set_secure_cookie(key, value)
    end
  end
end
......@@ -262,6 +262,39 @@ describe Gitlab::Regex do
it { is_expected.not_to match('!!()()') }
end
describe '.maven_version_regex' do
  subject { described_class.maven_version_regex }

  # Accepted values: plain and multi-part numeric versions, qualifiers and
  # snapshots, timestamped builds, long OSGi-style qualifiers, commit SHAs,
  # and the special 'RELEASE' keyword.
  it { is_expected.to match('0')}
  it { is_expected.to match('1') }
  it { is_expected.to match('03') }
  it { is_expected.to match('2.0') }
  it { is_expected.to match('01.2') }
  it { is_expected.to match('10.2.3-beta')}
  it { is_expected.to match('1.2-SNAPSHOT') }
  it { is_expected.to match('20') }
  it { is_expected.to match('20.3') }
  it { is_expected.to match('1.2.1') }
  it { is_expected.to match('1.4.2-12') }
  it { is_expected.to match('1.2-beta-2') }
  it { is_expected.to match('12.1.2-2-1') }
  it { is_expected.to match('1.1-beta-2') }
  it { is_expected.to match('1.3.350.v20200505-1744') }
  it { is_expected.to match('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq') }
  it { is_expected.to match('1.2-alpha-1-20050205.060708-1') }
  it { is_expected.to match('703220b4e2cea9592caeb9f3013f6b1e5335c293') }
  it { is_expected.to match('RELEASE') }

  # Rejected values: surrounding whitespace, slashes, and dot-dot sequences
  # (including percent-encoded forms) that could be abused for path traversal.
  it { is_expected.not_to match('..1.2.3') }
  it { is_expected.not_to match(' 1.2.3') }
  it { is_expected.not_to match("1.2.3 \r\t") }
  it { is_expected.not_to match("\r\t 1.2.3") }
  it { is_expected.not_to match('1./2.3') }
  it { is_expected.not_to match('1.2.3-4/../../') }
  it { is_expected.not_to match('1.2.3-4%2e%2e%') }
  it { is_expected.not_to match('../../../../../1.2.3') }
  it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
describe '.semver_regex' do
subject { described_class.semver_regex }
......
......@@ -262,4 +262,44 @@ describe Noteable do
end
end
end
describe "#has_any_diff_note_positions?" do
  # NOTE(review): branch names suggest these fixtures produce a merge-head
  # comparison that differs from the plain diff — confirm against the
  # repository test fixtures.
  let(:source_branch) { "compare-with-merge-head-source" }
  let(:target_branch) { "compare-with-merge-head-target" }
  let(:merge_request) { create(:merge_request, source_branch: source_branch, target_branch: target_branch) }

  # Eagerly created (let!) so the note exists before the before-hook runs
  # the position-capture service.
  let!(:note) do
    path = "files/markdown/ruby-style-guide.md"
    position = Gitlab::Diff::Position.new(
      old_path: path,
      new_path: path,
      new_line: 508,
      diff_refs: merge_request.diff_refs
    )

    create(:diff_note_on_merge_request, project: merge_request.project, position: position, noteable: merge_request)
  end

  before do
    # Build the merge ref, then capture diff note positions against it.
    MergeRequests::MergeToRefService.new(merge_request.project, merge_request.author).execute(merge_request)
    Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
  end

  it "returns true when it has diff note positions" do
    expect(merge_request.has_any_diff_note_positions?).to be(true)
  end

  it "returns false when it has notes but no diff note positions" do
    # Remove only the captured positions; the note itself is kept.
    DiffNotePosition.where(note: note).find_each(&:delete)

    expect(merge_request.has_any_diff_note_positions?).to be(false)
  end

  it "returns false when it has no notes" do
    merge_request.notes.find_each(&:destroy)

    expect(merge_request.has_any_diff_note_positions?).to be(false)
  end
end
end
......@@ -9,13 +9,38 @@ RSpec.shared_examples 'known sign in' do
user.update!(current_sign_in_ip: ip)
end
context 'with a valid post' do
context 'when remote IP does not match user last sign in IP' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('169.0.0.1')
end
def stub_cookie(value = user.id)
cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE] = {
value: value, expires: KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY
}
end
context 'when the remote IP and the last sign in IP match' do
before do
stub_user_ip('169.0.0.1')
stub_remote_ip('169.0.0.1')
end
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
it 'sets/updates the encrypted cookie' do
post_action
expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end
end
context 'when the remote IP and the last sign in IP do not match' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('169.0.0.1')
end
context 'when the cookie is not previously set' do
it 'notifies the user' do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
......@@ -23,37 +48,50 @@ RSpec.shared_examples 'known sign in' do
post_action
end
end
context 'when remote IP matches an active session' do
before do
existing_sessions = ActiveSession.session_ids_for_user(user.id)
existing_sessions.each { |sessions| ActiveSession.destroy(user, sessions) }
stub_user_ip('169.0.0.1')
stub_remote_ip('127.0.0.1')
it 'sets the encrypted cookie' do
post_action
ActiveSession.set(user, request)
expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end
end
it 'notifies the user when the cookie is expired' do
stub_cookie
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
Timecop.freeze((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
post_action
end
end
context 'when remote IP address matches last sign in IP' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('127.0.0.1')
it 'notifies the user when the cookie is for another user' do
stub_cookie(create(:user).id)
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
post_action
end
it 'does not notify the user when remote IP matches an active session' do
ActiveSession.set(user, request)
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
it 'does not notify the user when the cookie is present and not expired' do
stub_cookie
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
end
end
......@@ -1138,20 +1138,20 @@
dependencies:
defer-to-connect "^1.0.1"
"@toast-ui/editor@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@toast-ui/editor/-/editor-2.0.1.tgz#749e5be1f02f42ded51488d1575ab1c19ca59952"
integrity sha512-TC481O/zP37boY6H6oVN6KLVMY7yrU8zQu+3xqZ71V3Sr6D2XyaGb2Xub9XqTdqzBmzsf7y4Gi+EXO0IQ3rGVA==
"@toast-ui/editor@2.1.2", "@toast-ui/editor@^2.1.2":
version "2.1.2"
resolved "https://registry.yarnpkg.com/@toast-ui/editor/-/editor-2.1.2.tgz#0472431bd039ae70882d77910e83f0ad222d0b1c"
integrity sha512-yoWRVyp2m1dODH+bmzJaILUgl2L57GCQJ8c8+XRgJMwfxb/TFz5U+oT8JGAU5VwozIzKF0SyVMs8AEePwwhIIA==
dependencies:
"@types/codemirror" "0.0.71"
codemirror "^5.48.4"
"@toast-ui/vue-editor@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@toast-ui/vue-editor/-/vue-editor-2.0.1.tgz#c9c8c8da4c0a67b9fbc4240464388c67d72a0c22"
integrity sha512-sGsApl0n+GVAZbmPA+tTrq9rmmyh2mRgCgg2/mu1/lN7S4vPv/nQH8KXxLG9Y6hG2+kgelqz6wvbOCdzlM/HmQ==
"@toast-ui/vue-editor@2.1.2":
version "2.1.2"
resolved "https://registry.yarnpkg.com/@toast-ui/vue-editor/-/vue-editor-2.1.2.tgz#a790e69fcf7fb426e6b8ea190733477c3cc756aa"
integrity sha512-RK01W6D8FqtNq4MjWsXk6KRzOU/vL6mpiADAnH5l/lFK4G6UQJhLKsMRfmxIqCH+ivm8VtQzGdd9obUfD+XbCw==
dependencies:
"@toast-ui/editor" "^2.0.1"
"@toast-ui/editor" "^2.1.2"
"@types/anymatch@*":
version "1.3.0"
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment