diff --git a/.changelog.yml b/.changelog.yml index 942138327875..657dfa1c0ee5 100644 --- a/.changelog.yml +++ b/.changelog.yml @@ -14,28 +14,28 @@ groups: name: BREAKING labels: - kind/breaking - - - name: FEATURES - labels: - - kind/feature - name: SECURITY labels: - kind/security - - name: API + name: FEATURES labels: - - kind/api + - kind/feature - - name: BUGFIXES + name: API labels: - - kind/bug + - kind/api - name: ENHANCEMENTS labels: - kind/enhancement - kind/refactor - kind/ui + - + name: BUGFIXES + labels: + - kind/bug - name: TESTING labels: diff --git a/.drone.yml b/.drone.yml index d1cedb73cc5b..e766ba613424 100644 --- a/.drone.yml +++ b/.drone.yml @@ -4,7 +4,7 @@ name: compliance platform: os: linux - arch: arm64 + arch: amd64 trigger: event: @@ -15,19 +15,19 @@ trigger: steps: - name: deps-frontend pull: always - image: node:14 + image: node:16 commands: - make node_modules - name: lint-frontend - image: node:14 + image: node:16 commands: - make lint-frontend depends_on: [deps-frontend] - name: lint-backend pull: always - image: gitea/test_env:linux-arm64 # https://gitea.com/gitea/test-env + image: gitea/test_env:linux-amd64 # https://gitea.com/gitea/test-env commands: - make lint-backend environment: @@ -37,7 +37,7 @@ steps: - name: lint-backend-windows pull: always - image: gitea/test_env:linux-arm64 # https://gitea.com/gitea/test-env + image: gitea/test_env:linux-amd64 # https://gitea.com/gitea/test-env commands: - make golangci-lint vet environment: @@ -49,7 +49,7 @@ steps: - name: lint-backend-gogit pull: always - image: gitea/test_env:linux-arm64 # https://gitea.com/gitea/test-env + image: gitea/test_env:linux-amd64 # https://gitea.com/gitea/test-env commands: - make lint-backend environment: @@ -58,7 +58,7 @@ steps: TAGS: bindata gogit sqlite sqlite_unlock_notify - name: checks-frontend - image: node:14 + image: node:16 commands: - make checks-frontend depends_on: [deps-frontend] @@ -71,13 +71,13 @@ steps: depends_on: [lint-backend] - name: test-frontend - image: node:14 + image: node:16 commands: - make test-frontend depends_on: [lint-frontend] - name: build-frontend - image: node:14 + image: node:16 commands: - make frontend depends_on: [test-frontend] @@ -153,7 +153,7 @@ services: MYSQL_DATABASE: test - name: mysql8 - image: mysql:8.0 + image: mysql:8 environment: MYSQL_ALLOW_EMPTY_PASSWORD: yes MYSQL_DATABASE: testgitea @@ -277,7 +277,7 @@ steps: - test-mysql when: branch: - - master + - main event: - push - pull_request @@ -294,7 +294,7 @@ steps: - generate-coverage when: branch: - - master + - main event: - push - pull_request @@ -319,7 +319,7 @@ trigger: services: - name: pgsql pull: default - image: postgres:9.5 + image: postgres:10 environment: POSTGRES_DB: test POSTGRES_PASSWORD: postgres @@ -383,7 +383,7 @@ platform: trigger: branch: - - master + - main event: - cron cron: @@ -404,7 +404,7 @@ steps: - name: update pull: default - image: alpine:3.13 + image: alpine:3.14 commands: - ./build/update-locales.sh @@ -414,6 +414,7 @@ steps: settings: author_email: "teabot@gitea.io" author_name: GiteaBot + branch: main commit: true commit_message: "[skip ci] Updated translations via Crowdin" remote: "git@github.com:go-gitea/gitea.git" @@ -443,7 +444,7 @@ platform: trigger: branch: - - master + - main event: - cron cron: @@ -461,6 +462,7 @@ steps: settings: author_email: "teabot@gitea.io" author_name: GiteaBot + branch: main commit: true commit_message: "[skip ci] Updated licenses and gitignores " remote: "git@github.com:go-gitea/gitea.git" @@ -482,7 +484,7 @@ 
workspace: trigger: branch: - - master + - main - "release/*" event: - push @@ -501,7 +503,7 @@ steps: pull: always image: techknowlogick/xgo:go-1.16.x commands: - - curl -sL https://deb.nodesource.com/setup_14.x | bash - && apt-get install -y nodejs + - curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs - export PATH=$PATH:$GOPATH/bin - make release environment: @@ -528,7 +530,7 @@ steps: image: plugins/s3:1 settings: acl: public-read - bucket: releases + bucket: gitea-artifacts endpoint: https://storage.gitea.io path_style: true source: "dist/release/*" @@ -545,16 +547,16 @@ steps: event: - push - - name: release-master + - name: release-main image: plugins/s3:1 settings: acl: public-read - bucket: releases + bucket: gitea-artifacts endpoint: https://storage.gitea.io path_style: true source: "dist/release/*" strip_prefix: dist/release/ - target: /gitea/master + target: /gitea/main environment: AWS_ACCESS_KEY_ID: from_secret: aws_access_key_id @@ -562,7 +564,7 @@ steps: from_secret: aws_secret_access_key when: branch: - - master + - main event: - push @@ -597,7 +599,7 @@ steps: pull: always image: techknowlogick/xgo:go-1.16.x commands: - - curl -sL https://deb.nodesource.com/setup_14.x | bash - && apt-get install -y nodejs + - curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs - export PATH=$PATH:$GOPATH/bin - make release environment: @@ -624,7 +626,7 @@ steps: image: plugins/s3:1 settings: acl: public-read - bucket: releases + bucket: gitea-artifacts endpoint: https://storage.gitea.io path_style: true source: "dist/release/*" @@ -683,13 +685,13 @@ steps: from_secret: netlify_token when: branch: - - master + - main event: - push --- kind: pipeline -name: docker-linux-amd64-release +name: docker-linux-amd64-release-version platform: os: linux @@ -701,7 +703,6 @@ depends_on: trigger: ref: - - refs/heads/master - "refs/tags/**" event: exclude: @@ -715,7 +716,7 @@ steps: - name: publish pull: always - image: plugins/docker:linux-amd64 + image: techknowlogick/drone-docker:latest settings: auto_tag: true auto_tag_suffix: linux-amd64 @@ -732,7 +733,7 @@ steps: - pull_request - name: publish-rootless - image: plugins/docker:linux-amd64 + image: techknowlogick/drone-docker:latest settings: dockerfile: Dockerfile.rootless auto_tag: true @@ -752,6 +753,70 @@ steps: exclude: - pull_request +--- +kind: pipeline +name: docker-linux-amd64-release + +platform: + os: linux + arch: amd64 + +depends_on: + - testing-amd64 + - testing-arm64 + +trigger: + ref: + - refs/heads/main + event: + exclude: + - cron + +steps: + - name: fetch-tags + image: docker:git + commands: + - git fetch --tags --force + + - name: publish + pull: always + image: techknowlogick/drone-docker:latest + settings: + auto_tag: false + tags: dev-linux-amd64 + repo: gitea/gitea + build_args: + - GOPROXY=off + password: + from_secret: docker_password + username: + from_secret: docker_username + when: + event: + exclude: + - pull_request + + - name: publish-rootless + image: techknowlogick/drone-docker:latest + settings: + dockerfile: Dockerfile.rootless + auto_tag: false + tags: dev-linux-amd64-rootless + repo: gitea/gitea + build_args: + - GOPROXY=off + password: + from_secret: docker_password + username: + from_secret: docker_username + environment: + PLUGIN_MIRROR: + from_secret: plugin_mirror + when: + event: + exclude: + - pull_request + --- kind: pipeline name: docker-linux-arm64-dry-run @@ -770,7 +835,7 @@ trigger: steps: - name: dryrun pull: always - image: 
plugins/docker:linux-arm64 + image: techknowlogick/drone-docker:latest settings: dry_run: true repo: gitea/gitea @@ -786,7 +851,7 @@ steps: --- kind: pipeline -name: docker-linux-arm64-release +name: docker-linux-arm64-release-version platform: os: linux @@ -798,7 +863,6 @@ depends_on: trigger: ref: - - refs/heads/master - "refs/tags/**" event: exclude: @@ -812,7 +876,7 @@ steps: - name: publish pull: always - image: plugins/docker:linux-arm64 + image: techknowlogick/drone-docker:latest settings: auto_tag: true auto_tag_suffix: linux-arm64 @@ -832,7 +896,7 @@ steps: - pull_request - name: publish-rootless - image: plugins/docker:linux-arm64 + image: techknowlogick/drone-docker:latest settings: dockerfile: Dockerfile.rootless auto_tag: true @@ -854,7 +918,73 @@ steps: --- kind: pipeline -name: docker-manifest +name: docker-linux-arm64-release + +platform: + os: linux + arch: arm64 + +depends_on: + - testing-amd64 + - testing-arm64 + +trigger: + ref: + - refs/heads/main + event: + exclude: + - cron + +steps: + - name: fetch-tags + image: docker:git + commands: + - git fetch --tags --force + + - name: publish + pull: always + image: techknowlogick/drone-docker:latest + settings: + auto_tag: false + tags: dev-linux-arm64 + repo: gitea/gitea + build_args: + - GOPROXY=off + password: + from_secret: docker_password + username: + from_secret: docker_username + environment: + PLUGIN_MIRROR: + from_secret: plugin_mirror + when: + event: + exclude: + - pull_request + + - name: publish-rootless + image: techknowlogick/drone-docker:latest + settings: + dockerfile: Dockerfile.rootless + auto_tag: false + tags: dev-linux-arm64-rootless + repo: gitea/gitea + build_args: + - GOPROXY=off + password: + from_secret: docker_password + username: + from_secret: docker_username + environment: + PLUGIN_MIRROR: + from_secret: plugin_mirror + when: + event: + exclude: + - pull_request +--- +kind: pipeline +name: docker-manifest-version platform: os: linux @@ -886,12 +1016,54 @@ steps: trigger: ref: - - refs/heads/master - "refs/tags/**" event: exclude: - cron +depends_on: + - docker-linux-amd64-release-version + - docker-linux-arm64-release-version + +--- +kind: pipeline +name: docker-manifest + +platform: + os: linux + arch: amd64 + +steps: + - name: manifest-rootless + pull: always + image: plugins/manifest + settings: + auto_tag: false + ignore_missing: true + spec: docker/manifest.rootless.tmpl + password: + from_secret: docker_password + username: + from_secret: docker_username + + - name: manifest + image: plugins/manifest + settings: + auto_tag: false + ignore_missing: true + spec: docker/manifest.tmpl + password: + from_secret: docker_password + username: + from_secret: docker_username + +trigger: + ref: + - refs/heads/main + event: + exclude: + - cron + depends_on: - docker-linux-amd64-release - docker-linux-arm64-release @@ -909,7 +1081,7 @@ clone: trigger: branch: - - master + - main - "release/*" event: - push @@ -925,7 +1097,10 @@ depends_on: - release-latest - docker-linux-amd64-release - docker-linux-arm64-release + - docker-linux-amd64-release-version + - docker-linux-arm64-release-version - docker-manifest + - docker-manifest-version - docs steps: diff --git a/.eslintrc b/.eslintrc index bdb91ba337cd..438fe404ffb6 100644 --- a/.eslintrc +++ b/.eslintrc @@ -365,6 +365,7 @@ rules: unicorn/no-array-instanceof: [0] unicorn/no-array-push-push: [2] unicorn/no-console-spaces: [0] + unicorn/no-document-cookie: [2] unicorn/no-fn-reference-in-iterator: [0] unicorn/no-for-loop: [0] unicorn/no-hex-escape: [0] 
@@ -388,6 +389,7 @@ rules: unicorn/numeric-separators-style: [0] unicorn/prefer-add-event-listener: [2] unicorn/prefer-array-find: [2] + unicorn/prefer-array-flat-map: [2] unicorn/prefer-array-flat: [2] unicorn/prefer-array-index-of: [2] unicorn/prefer-array-some: [2] @@ -398,8 +400,10 @@ rules: unicorn/prefer-includes: [2] unicorn/prefer-math-trunc: [2] unicorn/prefer-modern-dom-apis: [0] + unicorn/prefer-module: [2] unicorn/prefer-negative-index: [2] unicorn/prefer-node-append: [0] + unicorn/prefer-node-protocol: [0] unicorn/prefer-node-remove: [0] unicorn/prefer-number-properties: [0] unicorn/prefer-optional-catch-binding: [2] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 702062490db8..9f5ecab4b060 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,6 +1,6 @@ Please check the following: -1. Make sure you are targeting the `master` branch, pull requests on release branches are only allowed for bug fixes. +1. Make sure you are targeting the `main` branch, pull requests on release branches are only allowed for bug fixes. 2. Read contributing guidelines: https://github.com/go-gitea/gitea/blob/master/CONTRIBUTING.md 3. Describe what your pull request does and which issue you're targeting (if any) diff --git a/.gitignore b/.gitignore index d42ccc48f7a9..637fd7dfdcf5 100644 --- a/.gitignore +++ b/.gitignore @@ -78,6 +78,8 @@ cpu.out /routers/repo/authorized_keys /node_modules /yarn.lock +/yarn-error.log +/npm-debug.log* /public/js /public/serviceworker.js /public/css diff --git a/.golangci.yml b/.golangci.yml index 88168af222b2..c3dd47ec29da 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -70,9 +70,6 @@ issues: - path: modules/log/ linters: - errcheck - - path: routers/routes/web.go - linters: - - dupl - path: routers/api/v1/repo/issue_subscription.go linters: - dupl @@ -110,3 +107,8 @@ issues: - text: "exitAfterDefer:" linters: - gocritic + - path: modules/graceful/manager_windows.go + linters: + - staticcheck + text: "svc.IsAnInteractiveSession is deprecated: Use IsWindowsService instead." + diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a4840009455..63a49e7f07dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,331 @@ This changelog goes through all the changes that have been made in each release without substantial changes to our git log; to see the highlights of what has been added to each release, please refer to the [blog](https://blog.gitea.io). 
+## [1.15.0-rc1](https://github.com/go-gitea/gitea/releases/tag/v1.15.0-rc1) - 2021-07-15 + +* BREAKING + * Make app.ini permissions more restrictive (#16266) + * Refactor Webhook + Add X-Hub-Signature (#16176) + * Add asymmetric JWT signing (#16010) + * Clean-up the settings hierarchy for issue_indexer queue (#16001) + * Change default queue settings to be low go-routines (#15964) + * Improve assets handler middleware (#15961) + * Rename StaticUrlPrefix to AssetUrlPrefix (#15779) + * Use a generic markup class to display externally rendered files and diffs (#15735) + * Add frontend testing, require node 12 (#15315) + * Move (custom) assets into subpath `/assets` (#15219) + * Use level config in log section when sub log section not set level (#15176) + * Links in markdown should be absolute to the repository not the server (#15088) +* SECURITY + * Encrypt LDAP bind password in db with SECRET_KEY (#15547) + * Remove random password in Dockerfiles (#15362) +* FEATURES + * Update Go-Git to take advantage of LargeObjectThreshold (#16316) + * Support custom mime type mapping for text files (#16304) + * Link to previous blames in file blame page (#16259) + * Add LRU mem cache implementation (#16226) + * Localize Email Templates (#16200) + * Make command in authorized keys a template (#16003) + * Add possibility to make branch in branch page (#15960) + * Add email headers (#15939) + * Make tasklist checkboxes clickable (#15791) + * Add selecting tags on the compare page (#15723) + * Add cron job to delete old actions from database (#15688) + * On open repository open common cat file batch and batch-check (#15667) + * Add tag protection (#15629) + * Add push to remote mirror repository (#15157) + * Add Image Diff for SVG files (#14867) + * Add dashboard milestone search and repo milestone search by name. 
(#14866) + * Add LFS Migration and Mirror (#14726) + * Improve notifications for WIP draft PR's (#14663) + * Disable Stars config option (#14653) + * Add option to provide signature for a token to verify key ownership (#14054) + * OAuth2 auto-register (#5123) +* API + * Return updated repository when changing repository using API (#16420) + * Let branch/tag name be a valid ref to get CI status (#16400) + * Add endpoint to get commits of PR (#16300) + * Allow COMMENT reviews to not specify a body (#16229) + * Add subject-type filter to list notification API endpoints (#16177) + * ListReleases add filter for draft and pre-releases (#16175) + * ListIssues add more filters (#16174) + * Issue Search Add filter for MilestoneNames (#16173) + * GET / SET User Settings (#16169) + * Expose repo.GetReviewers() & repo.GetAssignees() (#16168) + * User expose counters (#16167) + * Add repoGetTag (#16166) + * Add repoCreateTag (#16165) + * Creating a repo from a template repo via API (#15958) + * Add Active and ProhibitLogin to API (#15689) + * Add Location, Website and Description to API (#15675) + * Expose resolver via API (#15167) +* ENHANCEMENTS + * Support HTTP/2 in Let's Encrypt (#16371) + * Introduce NotifySubjectType (#16320) + * Add forge emojies (#16296) + * Implemented head_commit for webhooks (#16282) + * Upgrade Gliderlabs SSH to 0.3.3 and add FailedConnectionCallback (#16278) + * Add previous/next buttons to review comments (#16273) + * Review comments: break-word for long file names (#16272) + * Add configuration to restrict allowed user visibility modes (#16271) + * Add scroll-margin-top to account for sticky header (#16269) + * Add --quiet and --verbose to gitea web to control initial logging (#16260) + * Use gitea logging module for git module (#16243) + * Add tests for all webhooks (#16214) + * Add button to delete undeleted repositories from failed migrations (#16197) + * Speed up git diff highlight generation (#16180) + * Add OpenID claims "profile" and "email". 
(#16141) + * Reintroduce squash merge default comment as a config setting (#16134) + * Add sanitizer rules per renderer (#16110) + * Improve performance of dashboard list orgs (#16099) + * Refactor assert statements in tests (#16089) + * Add sso.Group, context.Auth, context.APIAuth to allow auth special routes (#16086) + * Remove unnecessary goroutine (#16080) + * Add attachments for PR reviews (#16075) + * Make the github migration less rate limit waiting to get comment per page from repository but not per issue (#16070) + * Add Visible modes function from Organisation to Users too (#16069) + * Add checkbox to delete pull branch after successful merge (#16049) + * Make commit info cancelable (#16032) + * Make modules/context.Context a context.Context (#16031) + * Unified custom config creation (#16012) + * Make sshd_config more flexible regarding connections (#16009) + * Append to existing trailers in generated squash commit message (#15980) + * Always store primary email address into email_address table and also the state (#15956) + * Load issue/PR context popup data only when needed (#15955) + * Remove remaining fontawesome usage in templates (#15952) + * Remove fomantic accordion module (#15951) + * Small refactoring of modules/private (#15947) + * Double the avatar size factor (#15941) + * Add curl to rootless docker image (#15908) + * Replace clipboard.js with async clipboard api (#15899) + * Allow custom highlight mapping beyond file extensions (#15808) + * Add trace logging to SSO methods (#15803) + * Refactor routers directory (#15800) + * Allow only internal registration (#15795) + * Add a new internal hook to save ssh log (#15787) + * Respect default merge message syntax when parsing item references (#15772) + * OAuth2 login: Set account link to "login" as default behavior (#15768) + * Use single shared random string generation function (#15741) + * Hold the event source when there are no listeners (#15725) + * Code comments improvements (#15722) + * Provide OIDC compliant user info endpoint (#15721) + * Fix webkit calendar icon color on arc-green (#15713) + * Improve Light Chroma style (#15699) + * Only use boost workers for leveldb shadow queues (#15696) + * Add compare tag dropdown to releases page (#15695) + * Add caret styling CSS (#15651) + * Remove x-ua-compatible meta tag (#15640) + * Refactor of link creation (#15619) + * Add a new table issue_index to store the max issue index so that issue could be deleted with no duplicated index (#15599) + * Rewrite of the LFS server (#15523) + * Display more repository type on admin repository management (#15440) + * Remove usage of some JS globals (#15378) + * SHA in merged commit comment should be rendered ui sha (#15376) + * Add well-known config for OIDC (#15355) + * Use route rather than use thus reducing the number of stack frames (#15301) + * Code Formats, Nits & Unused Func/Var deletions (#15286) + * Let package git depend on setting but not opposite (#15241) + * Fixed sanitize errors (#15240) + * response simple text message for not html request when 404 (#15229) + * Remove file-loader dependency (#15196) + * Refactor renders (#15175) + * Add mimetype mapping settings (#15133) + * Add Status Updates whilst Gitea migrations are occurring (#15076) + * Reload locales in initialisation if needed by utilizing i18n.Reset (#15073) + * Counterwork seemingly unclickable repo button labels (#15064) + * Add DefaultMergeStyle option to repository (#14789) + * Added support for gopher URLs. 
(#14749) + * Rework repository archive (#14723) + * Add links to toggle WIP status (#14677) + * Add Tabular Diff for CSV files (#14661) + * Use milestone deadline when sorting issues (#14551) +* BUGFIXES + * Fix invalid params and typo of email templates (#16394) + * Fix activation of primary email addresses (#16385) + * Fix calculation for finalPage in repo-search component (#16382) + * Specify user in rootless container numerically (#16361) + * Detect encoding changes while parsing diff (#16330) + * Fix U2F error reasons always hidden (#16327) + * Prevent zombie processes (#16314) + * Escape reference to `user` table in models.SearchEmails (#16313) + * Fix default push instructions on empty repos (#16302) + * Fix modified files list in webhooks when there is a space (#16288) + * Fix webhook commits wrong hash on HEAD reset (#16283) + * Fuzzer finds an NPE due to incorrect URLPrefix (#16249) + * Don't WARN log UserNotExist errors on ExternalUserLogin failure (#16238) + * Do not show No match found for tribute (#16231) + * Fix "Copy Link" for pull requests (#16230) + * Fix diff expansion is missing final line in a file (#16222) + * Fix private repo permission problem (#16142) + * Fix not able to update local created non-urlencoded wiki pages (#16139) + * More efficiently parse shas for shaPostProcessor (#16101) + * Fix `doctor --run check-db-consistency --fix` with label fix (#16094) + * Prevent webhook action buttons from shifting (#16087) + * Change default TMPDIR path in rootless containers (#16077) + * Fix typo and add TODO notice (#16064) + * Use git log name-status in get last commit (#16059) + * Fix 500 Error with branch and tag sharing the same name (#16040) + * Fix get tag when migration (#16014) + * Add custom emoji support (#16004) + * Use filepath.ToSlash and Join in indexer defaults and queues (#15971) + * Add permission check for ``GenerateRepository`` (#15946) + * Ensure settings for Service and Mailer are read on the install page (#15943) + * Fix layout of milestone view (#15927) + * Unregister non-matching serviceworkers (#15834) + * Multiple Queue improvements: LevelDB Wait on empty, shutdown empty shadow level queue, reduce goroutines etc (#15693) + * Attachment support repository route (#15580) + * Fix missing icons and colorpicker when mounted on suburl (#15501) + * Create a session on ReverseProxy and ensure that ReverseProxy users cannot change username (#15304) + * Prevent double-login for Git HTTP and LFS and simplify login (#15303) + * Resolve Object { type: "error", data: undefined } in stopwatch.js (#15278) + * Fix heatmap activity (#15252) + * Remove vendored copy of fomantic-dropdown (#15193) + * Update repository size on cron gc task (#15177) + * Add NeedPostProcess for Parser interface to improve performance of csv parser and some external parser (#15153) + * Add code block highlight to orgmode back (#14222) + * Remove User.GetOrganizations() (#14032) +* TESTING + * Bump `postgres` and `mysql` versions (#15710) + * Add tests for clone from wiki (#15513) + * Fix Benchmark tests, remove a broken one & add two new (#15250) + * Create Proper Migration tests (#15116) +* TRANSLATION + * Use a special name for update default branch on repository setting (#15893) + * Fix mirror_lfs source string in en-US locale (#15369) +* BUILD + * Upgrade xorm to v1.1.1 (#16339) + * Alpine 3.14 released (#16170) + * Disable legal comments in esbuild (#15929) + * Switch to Node 16 to build fronted (#15804) + * Use esbuild to minify CSS (#15756) + * Use binary version of revive 
linter (#15739) + * Fix: npx webpack make: *** [Makefile:699: public/js/index.js] Error -… (#15465) + * Stop packaging node_modules in release tarballs (#15273) + * Introduce esbuild on webpack (#14578) +* DOCS + * Update queue workers documentation (#15999) + * Comment out app.example.ini (#15807) + * Improve logo customization docs (#15754) + * Add some response status on api docs (#15399) + * Rework Token API comments (#15162) + * Add better errors for disabled account recovery (#15117) +* MISC + * Remove utf8 option from installation page (#16126) + * Use Wants= over Requires= in systemd file (#15897) + +## [1.14.4](https://github.com/go-gitea/gitea/releases/tag/v1.14.4) - 2021-07-06 + +* BUGFIXES + * Fix relative links in postprocessed images (#16334) (#16340) + * Fix list_options GetStartEnd (#16303) (#16305) + * Fix API to use author for commits instead of committer (#16276) (#16277) + * Handle misencoding of login_source cfg in mssql (#16268) (#16275) + * Fixed issues not updated by commits (#16254) (#16261) + * Improve efficiency in FindRenderizableReferenceNumeric and getReference (#16251) (#16255) + * Use html.Parse rather than html.ParseFragment (#16223) (#16225) + * Fix milestone counters on new issue (#16183) (#16224) + * reqOrgMembership calls need to be preceded by reqToken (#16198) (#16219) + +## [1.14.3](https://github.com/go-gitea/gitea/releases/tag/v1.14.3) - 2021-06-18 + +* SECURITY + * Encrypt migration credentials at rest (#15895) (#16187) + * Only check access tokens if they are likely to be tokens (#16164) (#16171) + * Add missing SameSite settings for the i_like_gitea cookie (#16037) (#16039) + * Fix setting of SameSite on cookies (#15989) (#15991) +* API + * Repository object only count releases as releases (#16184) (#16190) + * EditOrg respect RepoAdminChangeTeamAccess option (#16184) (#16190) + * Fix overly strict edit pr permissions (#15900) (#16081) +* BUGFIXES + * Run processors on whole of text (#16155) (#16185) + * Class `issue-keyword` is being incorrectly stripped off spans (#16163) (#16172) + * Fix language switch for install page (#16043) (#16128) + * Fix bug on getIssueIDsByRepoID (#16119) (#16124) + * Set self-adjusting deadline for connection writing (#16068) (#16123) + * Fix http path bug (#16117) (#16120) + * Fix data URI scramble (#16098) (#16118) + * Merge all deleteBranch as one function and also fix bug when delete branch don't close related PRs (#16067) (#16097) + * git migration: don't prompt interactively for clone credentials (#15902) (#16082) + * Fix case change in ownernames (#16045) (#16050) + * Don't manipulate input params in email notification (#16011) (#16033) + * Remove branch URL before IssueRefURL (#15968) (#15970) + * Fix layout of milestone view (#15927) (#15940) + * GitHub Migration, migrate draft releases too (#15884) (#15888) + * Close the gitrepo when deleting the repository (#15876) (#15887) + * Upgrade xorm to v1.1.0 (#15869) (#15885) + * Fix blame row height alignment (#15863) (#15883) + * Fix error message when saving generated LOCAL_ROOT_URL config (#15880) (#15882) + * Backport Fix LFS commit finder not working (#15856) (#15874) + * Stop calling WriteHeader in Write (#15862) (#15873) + * Add timeout to writing to responses (#15831) (#15872) + * Return go-get info on subdirs (#15642) (#15871) + * Restore PAM user autocreation functionality (#15825) (#15867) + * Fix truncate utf8 string (#15828) (#15854) + * Fix bound address/port for caddy's certmagic library (#15758) (#15848) + * Upgrade unrolled/render to v1.1.1 
(#15845) (#15846) + * Queue manager FlushAll can loop rapidly - add delay (#15733) (#15840) + * Tagger can be empty, as can Commit and Author - tolerate this (#15835) (#15839) + * Set autocomplete off on branches selector (#15809) (#15833) + * Add missing error to Doctor log (#15813) (#15824) + * Move restore repo to internal router and invoke from command to avoid open the same db file or queues files (#15790) (#15816) +* ENHANCEMENTS + * Removable media support to snap package (#16136) (#16138) + * Move sans-serif fallback font higher than emoji fonts (#15855) (#15892) +* DOCKER + * Only write config in environment-to-ini if there are changes (#15861) (#15868) + * Only offer hostcertificates if they exist (#15849) (#15853) + +## [1.14.2](https://github.com/go-gitea/gitea/releases/tag/v1.14.2) - 2021-05-09 + +* API + * Make change repo settings work on empty repos (#15778) (#15789) + * Add pull "merged" notification subject status to API (#15344) (#15654) +* BUGFIXES + * Ensure that ctx.Written is checked after issues(...) calls (#15797) (#15798) + * Use pulls in commit graph unless pulls are disabled (#15734 & #15740 & #15774) (#15775) + * Set GIT_DIR correctly if it is not set (#15751) (#15769) + * Fix bug where repositories appear unadopted (#15757) (#15767) + * Not show `ref-in-new-issue` pop when issue was disabled (#15761) (#15765) + * Drop back to use IsAnInteractiveSession for SVC (#15749) (#15762) + * Fix setting version table in dump (#15753) (#15759) + * Fix close button change on delete in simplemde area (#15737) (#15747) + * Defer closing the gitrepo until the end of the wrapped context functions (#15653) (#15746) + * Fix some ui bug about draft release (#15137) (#15745) + * Only log Error on getLastCommitStatus error to let pull list still be visible (#15716) (#15715) + * Move tooltip down to allow selection of Remove File on error (#15672) (#15714) + * Fix setting redis db path (#15698) (#15708) + * Fix DB session cleanup (#15697) (#15700) + * Fixed several activation bugs (#15473) (#15685) + * Delete references if repository gets deleted (#15681) (#15684) + * Fix orphaned objects deletion bug (#15657) (#15683) + * Delete protected branch if repository gets removed (#15658) (#15676) + * Remove spurious set name from eventsource.sharedworker.js (#15643) (#15652) + * Not update updated uinx for `git gc` (#15637) (#15641) + * Fix commit graph author link (#15627) (#15630) + * Fix webhook timeout bug (#15613) (#15621) + * Resolve panic on failed interface conversion in migration v156 (#15604) (#15610) + * Fix missing storage init (#15589) (#15598) + * If the default branch is not present do not report error on stats indexing (#15546 & #15583) (#15594) + * Fix lfs management find (#15537) (#15578) + * Fix NPE on view commit with notes (#15561) (#15573) + * Fix bug on commit graph (#15517) (#15530) + * Send size to /avatars if requested (#15459) (#15528) + * Prevent migration 156 failure if tag commit missing (#15519) (#15527) +* ENHANCEMENTS + * Display conflict-free merge messages for pull requests (#15773) (#15796) + * Exponential Backoff for ByteFIFO (#15724) (#15793) + * Issue list alignment tweaks (#15483) (#15766) + * Implement delete release attachments and update release attachments' name (#14130) (#15666) + * Add placeholder text to deploy key textarea (#15575) (#15576) + * Project board improvements (#15429) (#15560) + * Repo branch page: label size, PR ref, new PR button alignment (#15363) (#15365) +* MISC + * Fix webkit calendar icon color on arc-green (#15713) 
(#15728) + * Performance improvement for last commit cache and show-ref (#15455) (#15701) + * Bump unrolled/render to v1.1.0 (#15581) (#15608) + * Add ETag header (#15370) (#15552) + ## [1.14.1](https://github.com/go-gitea/gitea/releases/tag/v1.14.1) - 2021-04-15 * BUGFIXES diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f62c94c88ede..eb515f26854d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,12 +3,14 @@ ## Table of Contents - [Contribution Guidelines](#contribution-guidelines) + - [Table of Contents](#table-of-contents) - [Introduction](#introduction) - [Bug reports](#bug-reports) - [Discuss your design](#discuss-your-design) - [Testing redux](#testing-redux) - [Vendoring](#vendoring) - [Translation](#translation) + - [Building Gitea](#building-gitea) - [Code review](#code-review) - [Styleguide](#styleguide) - [Design guideline](#design-guideline) @@ -226,18 +228,18 @@ We assume in good faith that the information you provide is legally binding. We adopted a release schedule to streamline the process of working on, finishing, and issuing releases. The overall goal is to make a -minor release every two months, which breaks down into one month of +minor release every three or four months, which breaks down into two or three months of general development followed by one month of testing and polishing known as the release freeze. All the feature pull requests should be -merged in the first month of one release period. And, during the frozen -period, a corresponding release branch is open for fixes backported from -master. Release candidates are made during this period for user testing to +merged before feature freeze. And, during the frozen period, a corresponding +release branch is open for fixes backported from main branch. Release candidates +are made during this period for user testing to obtain a final version that is maintained in this branch. A release is maintained by issuing patch releases to only correct critical problems such as crashes or security issues. -Major release cycles are bimonthly. They always begin on the 25th and end on -the 24th (i.e., the 25th of December to February 24th). +Major release cycles are seasonal. They always begin on the 25th and end on +the 24th (i.e., the 25th of December to March 24th). During a development cycle, we may also publish any necessary minor releases for the previous version. 
For example, if the latest, published release is diff --git a/Dockerfile b/Dockerfile index 1234ba13bef5..43e0de40cc87 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ ################################### #Build stage -FROM golang:1.16-alpine3.13 AS build-env +FROM golang:1.16-alpine3.14 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} @@ -26,7 +26,7 @@ RUN if [ -n "${GITEA_VERSION}" ]; then git checkout "${GITEA_VERSION}"; fi \ # Begin env-to-ini build RUN go build contrib/environment-to-ini/environment-to-ini.go -FROM alpine:3.13 +FROM alpine:3.14 LABEL maintainer="maintainers@gitea.io" RUN set -x && \ diff --git a/Dockerfile.rootless b/Dockerfile.rootless index a379babc2d9a..d3ba15f109da 100644 --- a/Dockerfile.rootless +++ b/Dockerfile.rootless @@ -1,7 +1,7 @@ ################################### #Build stage -FROM golang:1.16-alpine3.13 AS build-env +FROM golang:1.16-alpine3.14 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} @@ -25,7 +25,7 @@ RUN if [ -n "${GITEA_VERSION}" ]; then git checkout "${GITEA_VERSION}"; fi \ # Begin env-to-ini build RUN go build contrib/environment-to-ini/environment-to-ini.go -FROM alpine:3.13 +FROM alpine:3.14 LABEL maintainer="maintainers@gitea.io" EXPOSE 2222 3000 @@ -35,6 +35,7 @@ RUN apk --no-cache add \ ca-certificates \ gettext \ git \ + curl \ gnupg RUN addgroup \ @@ -55,10 +56,13 @@ COPY docker/rootless / COPY --from=build-env --chown=root:root /go/src/code.gitea.io/gitea/gitea /usr/local/bin/gitea COPY --from=build-env --chown=root:root /go/src/code.gitea.io/gitea/environment-to-ini /usr/local/bin/environment-to-ini -USER git:git +#git:git +USER 1000:1000 ENV GITEA_WORK_DIR /var/lib/gitea ENV GITEA_CUSTOM /var/lib/gitea/custom ENV GITEA_TEMP /tmp/gitea +ENV TMPDIR /tmp/gitea + #TODO add to docs the ability to define the ini to load (usefull to test and revert a config) ENV GITEA_APP_INI /etc/gitea/app.ini ENV HOME "/var/lib/gitea/git" diff --git a/MAINTAINERS b/MAINTAINERS index d59c85e92e15..f43ba5ab9906 100644 --- a/MAINTAINERS +++ b/MAINTAINERS @@ -42,3 +42,5 @@ Norwin Roosen (@noerw) Kyle Dumont (@kdumontnu) Patrick Schratz (@pat-s) Janis Estelmann (@KN4CK3R) +Steven Kriegler (@justusbunsi) +Jimmy Praet (@jpraet) diff --git a/Makefile b/Makefile index 86577daf8a85..8ee74cbecc40 100644 --- a/Makefile +++ b/Makefile @@ -53,6 +53,9 @@ endif ifeq ($(OS), Windows_NT) GOFLAGS := -v -buildmode=exe EXECUTABLE ?= gitea.exe +else ifeq ($(OS), Windows) + GOFLAGS := -v -buildmode=exe + EXECUTABLE ?= gitea.exe else GOFLAGS := -v EXECUTABLE ?= gitea @@ -70,8 +73,9 @@ EXTRA_GOFLAGS ?= MAKE_EVIDENCE_DIR := .make_evidence -ifneq ($(RACE_ENABLED),) - GOTESTFLAGS ?= -race +ifeq ($(RACE_ENABLED),true) + GOFLAGS += -race + GOTESTFLAGS += -race endif STORED_VERSION_FILE := VERSION @@ -83,7 +87,7 @@ else ifneq ($(DRONE_BRANCH),) VERSION ?= $(subst release/v,,$(DRONE_BRANCH)) else - VERSION ?= master + VERSION ?= main endif STORED_VERSION=$(shell cat $(STORED_VERSION_FILE) 2>/dev/null) @@ -238,7 +242,7 @@ node-check: .PHONY: clean-all clean-all: clean - rm -rf $(WEBPACK_DEST_ENTRIES) $(MAKE_EVIDENCE_DIR) + rm -rf $(WEBPACK_DEST_ENTRIES) $(MAKE_EVIDENCE_DIR) node_modules .PHONY: distclean distclean: clean-all @@ -305,7 +309,10 @@ errcheck: .PHONY: revive revive: - GO111MODULE=on $(GO) run -mod=vendor build/lint.go -config .revive.toml -exclude=./vendor/... ./... || exit 1 + @hash revive > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ + GO111MODULE=off $(GO) get -u github.com/mgechev/revive; \ + fi + @revive -config .revive.toml -exclude=./vendor/... 
./... .PHONY: misspell-check misspell-check: @@ -313,7 +320,7 @@ misspell-check: GO111MODULE=off $(GO) get -u github.com/client9/misspell/cmd/misspell; \ fi @echo "Running misspell-check..." - @misspell -error -i unknwon,destory $(GO_SOURCES_OWN) + @misspell -error -i unknwon $(GO_SOURCES_OWN) .PHONY: misspell misspell: @@ -379,7 +386,7 @@ test-backend: @$(GO) test $(GOTESTFLAGS) -mod=vendor -tags='$(TEST_TAGS)' $(GO_PACKAGES) .PHONY: test-frontend -test-frontend: +test-frontend: node_modules @NODE_OPTIONS="--experimental-vm-modules --no-warnings" npx jest --color .PHONY: test-check @@ -397,7 +404,7 @@ test-check: .PHONY: test\#% test\#%: @echo "Running go test with -tags '$(TEST_TAGS)'..." - @$(GO) test -mod=vendor -tags='$(TEST_TAGS)' -run $(subst .,/,$*) $(GO_PACKAGES) + @$(GO) test -mod=vendor $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -run $(subst .,/,$*) $(GO_PACKAGES) .PHONY: coverage coverage: diff --git a/README.md b/README.md index 3bf8115eb254..8d8cf09f44ef 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@


Gitea - Git with a cup of tea

- + @@ -16,7 +16,7 @@ - + @@ -101,6 +101,16 @@ NOTES: 1. **YOU MUST READ THE [CONTRIBUTORS GUIDE](CONTRIBUTING.md) BEFORE STARTING TO WORK ON A PULL REQUEST.** 2. If you have found a vulnerability in the project, please write privately to **security@gitea.io**. Thanks! +## Translating + +Translations are done through Crowdin. If you want to translate to a new language, ask one of the managers in the Crowdin project to add a new language there. + +You can also just create an issue for adding a language or ask on Discord in the #translation channel. If you need context or find some translation issues, you can leave a comment on the string or ask on Discord. For general translation questions there is a section in the docs. Currently a bit empty but we hope to fill it as questions pop up. + +https://docs.gitea.io/en-us/translation-guidelines/ + +[![Crowdin](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea) + ## Further information For more information and instructions about how to install Gitea, please look at our [documentation](https://docs.gitea.io/en-us/). @@ -150,7 +160,7 @@ We're [working on it](https://github.com/go-gitea/gitea/issues/1029). ## License This project is licensed under the MIT License. -See the [LICENSE](https://github.com/go-gitea/gitea/blob/master/LICENSE) file +See the [LICENSE](https://github.com/go-gitea/gitea/blob/main/LICENSE) file for the full license text. ## Screenshots diff --git a/README_ZH.md b/README_ZH.md index 037f27dc9d35..8e9575e35ce8 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -1,13 +1,13 @@


Gitea - Git with a cup of tea

- + @@ -16,7 +16,7 @@ - + @@ -71,6 +71,11 @@ Gitea 的首要目标是创建一个极易安装,运行非常快速,安装 Fork -> Patch -> Push -> Pull Request +## 翻译 + +多语言翻译是基于Crowdin进行的. +[![Crowdin](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea) + ## 作者 * [Maintainers](https://github.com/orgs/go-gitea/people) @@ -79,7 +84,7 @@ Fork -> Patch -> Push -> Pull Request ## 授权许可 -本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://github.com/go-gitea/gitea/blob/master/LICENSE) 文件中。 +本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://github.com/go-gitea/gitea/blob/main/LICENSE) 文件中。 ## 截图 diff --git a/build.go b/build.go index ab57fb1d9a0e..b843465dca61 100644 --- a/build.go +++ b/build.go @@ -10,14 +10,6 @@ package main // These libraries will not be included in a normal compilation. import ( - // for lint - _ "github.com/mgechev/dots" - _ "github.com/mgechev/revive/formatter" - _ "github.com/mgechev/revive/lint" - _ "github.com/mgechev/revive/rule" - _ "github.com/mitchellh/go-homedir" - _ "github.com/pelletier/go-toml" - // for embed _ "github.com/shurcooL/vfsgen" diff --git a/build/lint.go b/build/lint.go deleted file mode 100644 index 60e93697aa7a..000000000000 --- a/build/lint.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Copyright (c) 2018 Minko Gechev. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -// +build ignore - -package main - -import ( - "flag" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "strings" - - "github.com/mgechev/dots" - "github.com/mgechev/revive/formatter" - "github.com/mgechev/revive/lint" - "github.com/mgechev/revive/rule" - "github.com/mitchellh/go-homedir" - "github.com/pelletier/go-toml" -) - -func fail(err string) { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) -} - -var defaultRules = []lint.Rule{ - &rule.VarDeclarationsRule{}, - &rule.PackageCommentsRule{}, - &rule.DotImportsRule{}, - &rule.BlankImportsRule{}, - &rule.ExportedRule{}, - &rule.VarNamingRule{}, - &rule.IndentErrorFlowRule{}, - &rule.IfReturnRule{}, - &rule.RangeRule{}, - &rule.ErrorfRule{}, - &rule.ErrorNamingRule{}, - &rule.ErrorStringsRule{}, - &rule.ReceiverNamingRule{}, - &rule.IncrementDecrementRule{}, - &rule.ErrorReturnRule{}, - &rule.UnexportedReturnRule{}, - &rule.TimeNamingRule{}, - &rule.ContextKeysType{}, - &rule.ContextAsArgumentRule{}, -} - -var allRules = append([]lint.Rule{ - &rule.ArgumentsLimitRule{}, - &rule.CyclomaticRule{}, - &rule.FileHeaderRule{}, - &rule.EmptyBlockRule{}, - &rule.SuperfluousElseRule{}, - &rule.ConfusingNamingRule{}, - &rule.GetReturnRule{}, - &rule.ModifiesParamRule{}, - &rule.ConfusingResultsRule{}, - &rule.DeepExitRule{}, - &rule.UnusedParamRule{}, - &rule.UnreachableCodeRule{}, - &rule.AddConstantRule{}, - &rule.FlagParamRule{}, - &rule.UnnecessaryStmtRule{}, - &rule.StructTagRule{}, - &rule.ModifiesValRecRule{}, - &rule.ConstantLogicalExprRule{}, - &rule.BoolLiteralRule{}, - &rule.RedefinesBuiltinIDRule{}, - &rule.ImportsBlacklistRule{}, - &rule.FunctionResultsLimitRule{}, - &rule.MaxPublicStructsRule{}, - &rule.RangeValInClosureRule{}, - &rule.RangeValAddress{}, - &rule.WaitGroupByValueRule{}, - &rule.AtomicRule{}, - &rule.EmptyLinesRule{}, - &rule.LineLengthLimitRule{}, - &rule.CallToGCRule{}, - &rule.DuplicatedImportsRule{}, - &rule.ImportShadowingRule{}, - &rule.BareReturnRule{}, - &rule.UnusedReceiverRule{}, - &rule.UnhandledErrorRule{}, - &rule.CognitiveComplexityRule{}, - &rule.StringOfIntRule{}, -}, defaultRules...) 
- -var allFormatters = []lint.Formatter{ - &formatter.Stylish{}, - &formatter.Friendly{}, - &formatter.JSON{}, - &formatter.NDJSON{}, - &formatter.Default{}, - &formatter.Unix{}, - &formatter.Checkstyle{}, - &formatter.Plain{}, -} - -func getFormatters() map[string]lint.Formatter { - result := map[string]lint.Formatter{} - for _, f := range allFormatters { - result[f.Name()] = f - } - return result -} - -func getLintingRules(config *lint.Config) []lint.Rule { - rulesMap := map[string]lint.Rule{} - for _, r := range allRules { - rulesMap[r.Name()] = r - } - - lintingRules := []lint.Rule{} - for name := range config.Rules { - rule, ok := rulesMap[name] - if !ok { - fail("cannot find rule: " + name) - } - lintingRules = append(lintingRules, rule) - } - - return lintingRules -} - -func parseConfig(path string) *lint.Config { - config := &lint.Config{} - file, err := ioutil.ReadFile(path) - if err != nil { - fail("cannot read the config file") - } - err = toml.Unmarshal(file, config) - if err != nil { - fail("cannot parse the config file: " + err.Error()) - } - return config -} - -func normalizeConfig(config *lint.Config) { - if config.Confidence == 0 { - config.Confidence = 0.8 - } - severity := config.Severity - if severity != "" { - for k, v := range config.Rules { - if v.Severity == "" { - v.Severity = severity - } - config.Rules[k] = v - } - for k, v := range config.Directives { - if v.Severity == "" { - v.Severity = severity - } - config.Directives[k] = v - } - } -} - -func getConfig() *lint.Config { - config := defaultConfig() - if configPath != "" { - config = parseConfig(configPath) - } - normalizeConfig(config) - return config -} - -func getFormatter() lint.Formatter { - formatters := getFormatters() - formatter := formatters["default"] - if formatterName != "" { - f, ok := formatters[formatterName] - if !ok { - fail("unknown formatter " + formatterName) - } - formatter = f - } - return formatter -} - -func buildDefaultConfigPath() string { - var result string - if homeDir, err := homedir.Dir(); err == nil { - result = filepath.Join(homeDir, "revive.toml") - if _, err := os.Stat(result); err != nil { - result = "" - } - } - - return result -} - -func defaultConfig() *lint.Config { - defaultConfig := lint.Config{ - Confidence: 0.0, - Severity: lint.SeverityWarning, - Rules: map[string]lint.RuleConfig{}, - } - for _, r := range defaultRules { - defaultConfig.Rules[r.Name()] = lint.RuleConfig{} - } - return &defaultConfig -} - -func normalizeSplit(strs []string) []string { - res := []string{} - for _, s := range strs { - t := strings.Trim(s, " \t") - if len(t) > 0 { - res = append(res, t) - } - } - return res -} - -func getPackages() [][]string { - globs := normalizeSplit(flag.Args()) - if len(globs) == 0 { - globs = append(globs, ".") - } - - packages, err := dots.ResolvePackages(globs, normalizeSplit(excludePaths)) - if err != nil { - fail(err.Error()) - } - - return packages -} - -type arrayFlags []string - -func (i *arrayFlags) String() string { - return strings.Join([]string(*i), " ") -} - -func (i *arrayFlags) Set(value string) error { - *i = append(*i, value) - return nil -} - -var configPath string -var excludePaths arrayFlags -var formatterName string -var help bool - -var originalUsage = flag.Usage - -func init() { - flag.Usage = func() { - originalUsage() - } - // command line help strings - const ( - configUsage = "path to the configuration TOML file, defaults to $HOME/revive.toml, if present (i.e. 
-config myconf.toml)" - excludeUsage = "list of globs which specify files to be excluded (i.e. -exclude foo/...)" - formatterUsage = "formatter to be used for the output (i.e. -formatter stylish)" - ) - - defaultConfigPath := buildDefaultConfigPath() - - flag.StringVar(&configPath, "config", defaultConfigPath, configUsage) - flag.Var(&excludePaths, "exclude", excludeUsage) - flag.StringVar(&formatterName, "formatter", "", formatterUsage) - flag.Parse() -} - -func main() { - config := getConfig() - formatter := getFormatter() - packages := getPackages() - - revive := lint.New(func(file string) ([]byte, error) { - return ioutil.ReadFile(file) - }) - - lintingRules := getLintingRules(config) - - failures, err := revive.Lint(packages, lintingRules, *config) - if err != nil { - fail(err.Error()) - } - - formatChan := make(chan lint.Failure) - exitChan := make(chan bool) - - var output string - go (func() { - output, err = formatter.Format(formatChan, *config) - if err != nil { - fail(err.Error()) - } - exitChan <- true - })() - - exitCode := 0 - for f := range failures { - if f.Confidence < config.Confidence { - continue - } - if exitCode == 0 { - exitCode = config.WarningCode - } - if c, ok := config.Rules[f.RuleName]; ok && c.Severity == lint.SeverityError { - exitCode = config.ErrorCode - } - if c, ok := config.Directives[f.RuleName]; ok && c.Severity == lint.SeverityError { - exitCode = config.ErrorCode - } - - formatChan <- f - } - - close(formatChan) - <-exitChan - if output != "" { - fmt.Println(output) - } - - os.Exit(exitCode) -} diff --git a/cmd/cmd.go b/cmd/cmd.go index bb768cc159d6..8d9d1ee077ed 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -7,9 +7,13 @@ package cmd import ( + "context" "errors" "fmt" + "os" + "os/signal" "strings" + "syscall" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/setting" @@ -66,3 +70,25 @@ func initDBDisableConsole(disableConsole bool) error { } return nil } + +func installSignals() (context.Context, context.CancelFunc) { + ctx, cancel := context.WithCancel(context.Background()) + go func() { + // install notify + signalChannel := make(chan os.Signal, 1) + + signal.Notify( + signalChannel, + syscall.SIGINT, + syscall.SIGTERM, + ) + select { + case <-signalChannel: + case <-ctx.Done(): + } + cancel() + signal.Reset() + }() + + return ctx, cancel +} diff --git a/cmd/convert.go b/cmd/convert.go index 23a3d8dbe9d5..e2ffd403acbc 100644 --- a/cmd/convert.go +++ b/cmd/convert.go @@ -27,10 +27,10 @@ func runConvert(ctx *cli.Context) error { return err } - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) setting.InitDBConfig() if !setting.Database.UseMySQL { diff --git a/cmd/dump.go b/cmd/dump.go index 11b9d201c302..ac98065f07b5 100644 --- a/cmd/dump.go +++ b/cmd/dump.go @@ -298,7 +298,7 @@ func runDump(ctx *cli.Context) error { } if ctx.IsSet("skip-custom-dir") && ctx.Bool("skip-custom-dir") { - log.Info("Skiping custom directory") + log.Info("Skipping custom directory") } else { customDir, err := os.Stat(setting.CustomPath) if err == nil && customDir.IsDir() { diff --git a/cmd/dump_repo.go b/cmd/dump_repo.go index cea640b53438..69813e3c872f 100644 --- a/cmd/dump_repo.go +++ b/cmd/dump_repo.go @@ -69,7 +69,7 @@ var 
CmdDumpRepository = cli.Command{ cli.StringFlag{ Name: "units", Value: "", - Usage: `Which items will be migrated, one or more units should be separated as comma. + Usage: `Which items will be migrated, one or more units should be separated as comma. wiki, issues, labels, releases, release_assets, milestones, pull_requests, comments are allowed. Empty means all units.`, }, }, @@ -80,10 +80,10 @@ func runDumpRepository(ctx *cli.Context) error { return err } - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) setting.InitDBConfig() var ( diff --git a/cmd/embedded.go b/cmd/embedded.go index 363b85c066ea..528f32402e1a 100644 --- a/cmd/embedded.go +++ b/cmd/embedded.go @@ -19,6 +19,7 @@ import ( "code.gitea.io/gitea/modules/public" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/util" "github.com/gobwas/glob" "github.com/urfave/cli" @@ -271,7 +272,7 @@ func extractAsset(d string, a asset, overwrite, rename bool) error { } else if !fi.Mode().IsRegular() { return fmt.Errorf("%s already exists, but it's not a regular file", dest) } else if rename { - if err := os.Rename(dest, dest+".bak"); err != nil { + if err := util.Rename(dest, dest+".bak"); err != nil { return fmt.Errorf("Error creating backup for %s: %v", dest, err) } // Attempt to respect file permissions mask (even if user:group will be set anew) diff --git a/cmd/generate.go b/cmd/generate.go index 13a99c94f462..35c77a815b1d 100644 --- a/cmd/generate.go +++ b/cmd/generate.go @@ -71,7 +71,7 @@ func runGenerateInternalToken(c *cli.Context) error { } func runGenerateLfsJwtSecret(c *cli.Context) error { - JWTSecretBase64, err := generate.NewJwtSecret() + JWTSecretBase64, err := generate.NewJwtSecretBase64() if err != nil { return err } diff --git a/cmd/hook.go b/cmd/hook.go index def3b636eb67..87f1f37562e2 100644 --- a/cmd/hook.go +++ b/cmd/hook.go @@ -152,20 +152,21 @@ func runHookPreReceive(c *cli.Context) error { if os.Getenv(models.EnvIsInternal) == "true" { return nil } + ctx, cancel := installSignals() + defer cancel() setup("hooks/pre-receive.log", c.Bool("debug")) if len(os.Getenv("SSH_ORIGINAL_COMMAND")) == 0 { if setting.OnlyAllowPushIfGiteaEnvironmentSet { - fail(`Rejecting changes as Gitea environment not set. + return fail(`Rejecting changes as Gitea environment not set. 
If you are pushing over SSH you must push with a key managed by Gitea or set your environment appropriately.`, "") - } else { - return nil } + return nil } - // the environment setted on serv command + // the environment is set by serv command isWiki := os.Getenv(models.EnvRepoIsWiki) == "true" username := os.Getenv(models.EnvRepoUsername) reponame := os.Getenv(models.EnvRepoName) @@ -179,7 +180,7 @@ Gitea or set your environment appropriately.`, "") GitObjectDirectory: os.Getenv(private.GitObjectDirectory), GitQuarantinePath: os.Getenv(private.GitQuarantinePath), GitPushOptions: pushOptions(), - ProtectedBranchID: prID, + PullRequestID: prID, IsDeployKey: isDeployKey, } @@ -221,8 +222,8 @@ Gitea or set your environment appropriately.`, "") total++ lastline++ - // If the ref is a branch, check if it's protected - if strings.HasPrefix(refFullName, git.BranchPrefix) { + // If the ref is a branch or tag, check if it's protected + if strings.HasPrefix(refFullName, git.BranchPrefix) || strings.HasPrefix(refFullName, git.TagPrefix) { oldCommitIDs[count] = oldCommitID newCommitIDs[count] = newCommitID refFullNames[count] = refFullName @@ -230,19 +231,19 @@ Gitea or set your environment appropriately.`, "") fmt.Fprintf(out, "*") if count >= hookBatchSize { - fmt.Fprintf(out, " Checking %d branches\n", count) + fmt.Fprintf(out, " Checking %d references\n", count) hookOptions.OldCommitIDs = oldCommitIDs hookOptions.NewCommitIDs = newCommitIDs hookOptions.RefFullNames = refFullNames - statusCode, msg := private.HookPreReceive(username, reponame, hookOptions) + statusCode, msg := private.HookPreReceive(ctx, username, reponame, hookOptions) switch statusCode { case http.StatusOK: // no-op case http.StatusInternalServerError: - fail("Internal Server Error", msg) + return fail("Internal Server Error", msg) default: - fail(msg, "") + return fail(msg, "") } count = 0 lastline = 0 @@ -261,14 +262,14 @@ Gitea or set your environment appropriately.`, "") hookOptions.NewCommitIDs = newCommitIDs[:count] hookOptions.RefFullNames = refFullNames[:count] - fmt.Fprintf(out, " Checking %d branches\n", count) + fmt.Fprintf(out, " Checking %d references\n", count) - statusCode, msg := private.HookPreReceive(username, reponame, hookOptions) + statusCode, msg := private.HookPreReceive(ctx, username, reponame, hookOptions) switch statusCode { case http.StatusInternalServerError: - fail("Internal Server Error", msg) + return fail("Internal Server Error", msg) case http.StatusForbidden: - fail(msg, "") + return fail(msg, "") } } else if lastline > 0 { fmt.Fprintf(out, "\n") @@ -285,8 +286,11 @@ func runHookUpdate(c *cli.Context) error { } func runHookPostReceive(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + // First of all run update-server-info no matter what - if _, err := git.NewCommand("update-server-info").Run(); err != nil { + if _, err := git.NewCommand("update-server-info").SetParentContext(ctx).Run(); err != nil { return fmt.Errorf("Failed to call 'git update-server-info': %v", err) } @@ -299,12 +303,11 @@ func runHookPostReceive(c *cli.Context) error { if len(os.Getenv("SSH_ORIGINAL_COMMAND")) == 0 { if setting.OnlyAllowPushIfGiteaEnvironmentSet { - fail(`Rejecting changes as Gitea environment not set. + return fail(`Rejecting changes as Gitea environment not set. 
If you are pushing over SSH you must push with a key managed by Gitea or set your environment appropriately.`, "") - } else { - return nil } + return nil } var out io.Writer @@ -320,7 +323,7 @@ Gitea or set your environment appropriately.`, "") } } - // the environment setted on serv command + // the environment is set by serv command repoUser := os.Getenv(models.EnvRepoUsername) isWiki := os.Getenv(models.EnvRepoIsWiki) == "true" repoName := os.Getenv(models.EnvRepoName) @@ -371,11 +374,11 @@ Gitea or set your environment appropriately.`, "") hookOptions.OldCommitIDs = oldCommitIDs hookOptions.NewCommitIDs = newCommitIDs hookOptions.RefFullNames = refFullNames - resp, err := private.HookPostReceive(repoUser, repoName, hookOptions) + resp, err := private.HookPostReceive(ctx, repoUser, repoName, hookOptions) if resp == nil { _ = dWriter.Close() hookPrintResults(results) - fail("Internal Server Error", err) + return fail("Internal Server Error", err) } wasEmpty = wasEmpty || resp.RepoWasEmpty results = append(results, resp.Results...) @@ -386,9 +389,9 @@ Gitea or set your environment appropriately.`, "") if count == 0 { if wasEmpty && masterPushed { // We need to tell the repo to reset the default branch to master - err := private.SetDefaultBranch(repoUser, repoName, "master") + err := private.SetDefaultBranch(ctx, repoUser, repoName, "master") if err != nil { - fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err) + return fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err) } } fmt.Fprintf(out, "Processed %d references in total\n", total) @@ -404,11 +407,11 @@ Gitea or set your environment appropriately.`, "") fmt.Fprintf(out, " Processing %d references\n", count) - resp, err := private.HookPostReceive(repoUser, repoName, hookOptions) + resp, err := private.HookPostReceive(ctx, repoUser, repoName, hookOptions) if resp == nil { _ = dWriter.Close() hookPrintResults(results) - fail("Internal Server Error", err) + return fail("Internal Server Error", err) } wasEmpty = wasEmpty || resp.RepoWasEmpty results = append(results, resp.Results...) 
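The hook commands above now obtain a signal-aware context from installSignals() (added in cmd/cmd.go earlier in this diff), thread it into the private.* calls, and return fail(...) instead of exiting in place. A minimal standalone sketch of that pattern is shown below; doWork is a hypothetical stand-in for the internal API calls and is not part of Gitea.

```go
package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"
	"time"
)

// installSignals mirrors the helper added to cmd/cmd.go in this diff: the
// returned context is cancelled when SIGINT or SIGTERM arrives, or when the
// caller invokes cancel.
func installSignals() (context.Context, context.CancelFunc) {
	ctx, cancel := context.WithCancel(context.Background())
	go func() {
		signalChannel := make(chan os.Signal, 1)
		signal.Notify(signalChannel, syscall.SIGINT, syscall.SIGTERM)
		select {
		case <-signalChannel:
		case <-ctx.Done():
		}
		cancel()
		signal.Reset()
	}()
	return ctx, cancel
}

// doWork is a hypothetical stand-in for the private.* helpers that now accept
// a context so they can stop when the command is interrupted.
func doWork(ctx context.Context) error {
	select {
	case <-time.After(2 * time.Second):
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func main() {
	ctx, cancel := installSignals()
	defer cancel()

	if err := doWork(ctx); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("done")
}
```

Cancelling the shared context on SIGINT/SIGTERM gives in-flight internal requests a chance to stop cleanly when the hook process is interrupted, and returning the error lets the caller decide the exit path instead of terminating mid-call.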
@@ -417,9 +420,9 @@ Gitea or set your environment appropriately.`, "") if wasEmpty && masterPushed { // We need to tell the repo to reset the default branch to master - err := private.SetDefaultBranch(repoUser, repoName, "master") + err := private.SetDefaultBranch(ctx, repoUser, repoName, "master") if err != nil { - fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err) + return fail("Internal Server Error", "SetDefaultBranch failed with Error: %v", err) } } _ = dWriter.Close() diff --git a/cmd/keys.go b/cmd/keys.go index 7456815cd77b..684aca64e22a 100644 --- a/cmd/keys.go +++ b/cmd/keys.go @@ -62,9 +62,12 @@ func runKeys(c *cli.Context) error { return errors.New("No key type and content provided") } + ctx, cancel := installSignals() + defer cancel() + setup("keys.log", false) - authorizedString, err := private.AuthorizedPublicKeyByContent(content) + authorizedString, err := private.AuthorizedPublicKeyByContent(ctx, content) if err != nil { return err } diff --git a/cmd/mailer.go b/cmd/mailer.go index ee11b56cc77b..1a4b0902e268 100644 --- a/cmd/mailer.go +++ b/cmd/mailer.go @@ -14,6 +14,9 @@ import ( ) func runSendMail(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setting.NewContext() if err := argsSet(c, "title"); err != nil { @@ -39,7 +42,7 @@ func runSendMail(c *cli.Context) error { } } - status, message := private.SendEmail(subject, body, nil) + status, message := private.SendEmail(ctx, subject, body, nil) if status != http.StatusOK { fmt.Printf("error: %s\n", message) return nil diff --git a/cmd/manager.go b/cmd/manager.go index 20c7858682ac..99d283b4418e 100644 --- a/cmd/manager.go +++ b/cmd/manager.go @@ -236,10 +236,13 @@ func runRemoveLogger(c *cli.Context) error { group = log.DEFAULT } name := c.Args().First() - statusCode, msg := private.RemoveLogger(group, name) + ctx, cancel := installSignals() + defer cancel() + + statusCode, msg := private.RemoveLogger(ctx, group, name) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -371,10 +374,13 @@ func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) e if c.IsSet("name") { name = c.String("name") } - statusCode, msg := private.AddLogger(group, name, mode, vals) + ctx, cancel := installSignals() + defer cancel() + + statusCode, msg := private.AddLogger(ctx, group, name, mode, vals) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -382,11 +388,14 @@ func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) e } func runShutdown(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.Shutdown() + statusCode, msg := private.Shutdown(ctx) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -394,11 +403,14 @@ func runShutdown(c *cli.Context) error { } func runRestart(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.Restart() + statusCode, msg := private.Restart(ctx) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, 
msg) @@ -406,11 +418,14 @@ func runRestart(c *cli.Context) error { } func runFlushQueues(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.FlushQueues(c.Duration("timeout"), c.Bool("non-blocking")) + statusCode, msg := private.FlushQueues(ctx, c.Duration("timeout"), c.Bool("non-blocking")) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -418,11 +433,14 @@ func runFlushQueues(c *cli.Context) error { } func runPauseLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.PauseLogging() + statusCode, msg := private.PauseLogging(ctx) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -430,11 +448,14 @@ func runPauseLogging(c *cli.Context) error { } func runResumeLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.ResumeLogging() + statusCode, msg := private.ResumeLogging(ctx) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) @@ -442,11 +463,14 @@ func runResumeLogging(c *cli.Context) error { } func runReleaseReopenLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + setup("manager", c.Bool("debug")) - statusCode, msg := private.ReleaseReopenLogging() + statusCode, msg := private.ReleaseReopenLogging(ctx) switch statusCode { case http.StatusInternalServerError: - fail("InternalServerError", msg) + return fail("InternalServerError", msg) } fmt.Fprintln(os.Stdout, msg) diff --git a/cmd/migrate.go b/cmd/migrate.go index 2428925887c9..23bc97b0c41f 100644 --- a/cmd/migrate.go +++ b/cmd/migrate.go @@ -28,10 +28,10 @@ func runMigrate(ctx *cli.Context) error { return err } - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) setting.InitDBConfig() if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil { diff --git a/cmd/migrate_storage.go b/cmd/migrate_storage.go index 871baed92de9..8123716f9ba9 100644 --- a/cmd/migrate_storage.go +++ b/cmd/migrate_storage.go @@ -110,10 +110,10 @@ func runMigrateStorage(ctx *cli.Context) error { return err } - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) setting.InitDBConfig() if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil { @@ -184,7 +184,7 @@ func runMigrateStorage(ctx *cli.Context) error { return fmt.Errorf("Unsupported storage: %s", ctx.String("type")) } - log.Warn("All files have been copied to 
the new placement but old files are still on the orignial placement.") + log.Warn("All files have been copied to the new placement but old files are still on the original placement.") return nil } diff --git a/cmd/restore_repo.go b/cmd/restore_repo.go index 541995879bf4..1208796c9bde 100644 --- a/cmd/restore_repo.go +++ b/cmd/restore_repo.go @@ -5,15 +5,12 @@ package cmd import ( - "context" - "strings" + "errors" + "net/http" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/migrations" - "code.gitea.io/gitea/modules/migrations/base" + "code.gitea.io/gitea/modules/private" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/storage" - pull_service "code.gitea.io/gitea/services/pull" "github.com/urfave/cli" ) @@ -43,77 +40,29 @@ var CmdRestoreRepository = cli.Command{ cli.StringFlag{ Name: "units", Value: "", - Usage: `Which items will be restored, one or more units should be separated as comma. + Usage: `Which items will be restored, one or more units should be separated as comma. wiki, issues, labels, releases, release_assets, milestones, pull_requests, comments are allowed. Empty means all units.`, }, }, } -func runRestoreRepository(ctx *cli.Context) error { - if err := initDB(); err != nil { - return err - } - - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) - setting.InitDBConfig() +func runRestoreRepository(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() - if err := storage.Init(); err != nil { - return err - } - - if err := pull_service.Init(); err != nil { - return err - } - - var opts = base.MigrateOptions{ - RepoName: ctx.String("repo_name"), - } - - if len(ctx.String("units")) == 0 { - opts.Wiki = true - opts.Issues = true - opts.Milestones = true - opts.Labels = true - opts.Releases = true - opts.Comments = true - opts.PullRequests = true - opts.ReleaseAssets = true - } else { - units := strings.Split(ctx.String("units"), ",") - for _, unit := range units { - switch strings.ToLower(unit) { - case "wiki": - opts.Wiki = true - case "issues": - opts.Issues = true - case "milestones": - opts.Milestones = true - case "labels": - opts.Labels = true - case "releases": - opts.Releases = true - case "release_assets": - opts.ReleaseAssets = true - case "comments": - opts.Comments = true - case "pull_requests": - opts.PullRequests = true - } - } - } + setting.NewContext() - if err := migrations.RestoreRepository( - context.Background(), - ctx.String("repo_dir"), - ctx.String("owner_name"), - ctx.String("repo_name"), - ); err != nil { - log.Fatal("Failed to restore repository: %v", err) - return err + statusCode, errStr := private.RestoreRepo( + ctx, + c.String("repo_dir"), + c.String("owner_name"), + c.String("repo_name"), + c.StringSlice("units"), + ) + if statusCode == http.StatusOK { + return nil } - return nil + log.Fatal("Failed to restore repository: %v", errStr) + return errors.New(errStr) } diff --git a/cmd/serv.go b/cmd/serv.go index 56167f63a878..97ae901d270e 100644 --- a/cmd/serv.go +++ b/cmd/serv.go @@ -72,7 +72,10 @@ var ( alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`) ) -func fail(userMessage, logMessage string, args ...interface{}) { +func fail(userMessage, logMessage string, args ...interface{}) error { + // There appears to be a chance to cause a zombie process and failure to read the Exit status + // if nothing is outputted on stdout. 
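Editor's note on the rewritten fail helper (its body continues just below): it now writes a blank line to stdout to sidestep the zombie-process issue described in the comment, reports the problem on stderr, logs it over the internal SSH log endpoint, and returns a cli.ExitCoder so the exit status is set by urfave/cli instead of an immediate os.Exit. A standalone sketch of that pattern, assuming urfave/cli v1 and omitting the SSH logging:

    package main

    import (
        "fmt"
        "os"

        "github.com/urfave/cli"
    )

    // fail reports a user-facing message and returns an error carrying the
    // exit code, instead of terminating the process itself.
    func fail(userMessage, logMessage string, args ...interface{}) error {
        fmt.Fprintln(os.Stdout, "") // always emit something on stdout
        fmt.Fprintln(os.Stderr, "Gitea:", userMessage)
        if len(logMessage) > 0 {
            fmt.Fprintf(os.Stderr, logMessage+"\n", args...)
        }
        return cli.NewExitError(fmt.Sprintf("Gitea: %s", userMessage), 1)
    }

    func run(c *cli.Context) error {
        if c.NArg() == 0 {
            return fail("Too few arguments", "no arguments in cmd")
        }
        return nil
    }

    func main() {
        app := cli.NewApp()
        app.Action = run
        // urfave/cli translates a returned ExitCoder into the exit status.
        _ = app.Run(os.Args)
    }

The actual fail body follows in the diff.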
+ fmt.Fprintln(os.Stdout, "") fmt.Fprintln(os.Stderr, "Gitea:", userMessage) if len(logMessage) > 0 { @@ -80,11 +83,19 @@ func fail(userMessage, logMessage string, args ...interface{}) { fmt.Fprintf(os.Stderr, logMessage+"\n", args...) } } + ctx, cancel := installSignals() + defer cancel() - os.Exit(1) + if len(logMessage) > 0 { + _ = private.SSHLog(ctx, true, fmt.Sprintf(logMessage+": ", args...)) + } + return cli.NewExitError(fmt.Sprintf("Gitea: %s", userMessage), 1) } func runServ(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + // FIXME: This needs to internationalised setup("serv.log", c.Bool("debug")) @@ -102,18 +113,18 @@ func runServ(c *cli.Context) error { keys := strings.Split(c.Args()[0], "-") if len(keys) != 2 || keys[0] != "key" { - fail("Key ID format error", "Invalid key argument: %s", c.Args()[0]) + return fail("Key ID format error", "Invalid key argument: %s", c.Args()[0]) } keyID, err := strconv.ParseInt(keys[1], 10, 64) if err != nil { - fail("Key ID format error", "Invalid key argument: %s", c.Args()[1]) + return fail("Key ID format error", "Invalid key argument: %s", c.Args()[1]) } cmd := os.Getenv("SSH_ORIGINAL_COMMAND") if len(cmd) == 0 { - key, user, err := private.ServNoCommand(keyID) + key, user, err := private.ServNoCommand(ctx, keyID) if err != nil { - fail("Internal error", "Failed to check provided key: %v", err) + return fail("Internal error", "Failed to check provided key: %v", err) } switch key.Type { case models.KeyTypeDeploy: @@ -131,11 +142,11 @@ func runServ(c *cli.Context) error { words, err := shellquote.Split(cmd) if err != nil { - fail("Error parsing arguments", "Failed to parse arguments: %v", err) + return fail("Error parsing arguments", "Failed to parse arguments: %v", err) } if len(words) < 2 { - fail("Too few arguments", "Too few arguments in cmd: %s", cmd) + return fail("Too few arguments", "Too few arguments in cmd: %s", cmd) } verb := words[0] @@ -147,7 +158,7 @@ func runServ(c *cli.Context) error { var lfsVerb string if verb == lfsAuthenticateVerb { if !setting.LFS.StartServer { - fail("Unknown git command", "LFS authentication request over SSH denied, LFS support is disabled") + return fail("Unknown git command", "LFS authentication request over SSH denied, LFS support is disabled") } if len(words) > 2 { @@ -160,37 +171,37 @@ func runServ(c *cli.Context) error { rr := strings.SplitN(repoPath, "/", 2) if len(rr) != 2 { - fail("Invalid repository path", "Invalid repository path: %v", repoPath) + return fail("Invalid repository path", "Invalid repository path: %v", repoPath) } username := strings.ToLower(rr[0]) reponame := strings.ToLower(strings.TrimSuffix(rr[1], ".git")) if alphaDashDotPattern.MatchString(reponame) { - fail("Invalid repo name", "Invalid repo name: %s", reponame) + return fail("Invalid repo name", "Invalid repo name: %s", reponame) } if setting.EnablePprof || c.Bool("enable-pprof") { if err := os.MkdirAll(setting.PprofDataPath, os.ModePerm); err != nil { - fail("Error while trying to create PPROF_DATA_PATH", "Error while trying to create PPROF_DATA_PATH: %v", err) + return fail("Error while trying to create PPROF_DATA_PATH", "Error while trying to create PPROF_DATA_PATH: %v", err) } stopCPUProfiler, err := pprof.DumpCPUProfileForUsername(setting.PprofDataPath, username) if err != nil { - fail("Internal Server Error", "Unable to start CPU profile: %v", err) + return fail("Internal Server Error", "Unable to start CPU profile: %v", err) } defer func() { stopCPUProfiler() err := 
pprof.DumpMemProfileForUsername(setting.PprofDataPath, username) if err != nil { - fail("Internal Server Error", "Unable to dump Mem Profile: %v", err) + _ = fail("Internal Server Error", "Unable to dump Mem Profile: %v", err) } }() } requestedMode, has := allowedCommands[verb] if !has { - fail("Unknown git command", "Unknown git command %s", verb) + return fail("Unknown git command", "Unknown git command %s", verb) } if verb == lfsAuthenticateVerb { @@ -199,21 +210,20 @@ func runServ(c *cli.Context) error { } else if lfsVerb == "download" { requestedMode = models.AccessModeRead } else { - fail("Unknown LFS verb", "Unknown lfs verb %s", lfsVerb) + return fail("Unknown LFS verb", "Unknown lfs verb %s", lfsVerb) } } - results, err := private.ServCommand(keyID, username, reponame, requestedMode, verb, lfsVerb) + results, err := private.ServCommand(ctx, keyID, username, reponame, requestedMode, verb, lfsVerb) if err != nil { if private.IsErrServCommand(err) { errServCommand := err.(private.ErrServCommand) if errServCommand.StatusCode != http.StatusInternalServerError { - fail("Unauthorized", "%s", errServCommand.Error()) - } else { - fail("Internal Server Error", "%s", errServCommand.Error()) + return fail("Unauthorized", "%s", errServCommand.Error()) } + return fail("Internal Server Error", "%s", errServCommand.Error()) } - fail("Internal Server Error", "%s", err.Error()) + return fail("Internal Server Error", "%s", err.Error()) } os.Setenv(models.EnvRepoIsWiki, strconv.FormatBool(results.IsWiki)) os.Setenv(models.EnvRepoName, results.RepoName) @@ -246,7 +256,7 @@ func runServ(c *cli.Context) error { // Sign and get the complete encoded token as a string using the secret tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes) if err != nil { - fail("Internal error", "Failed to sign JWT token: %v", err) + return fail("Internal error", "Failed to sign JWT token: %v", err) } tokenAuthentication := &models.LFSTokenResponse{ @@ -259,7 +269,7 @@ func runServ(c *cli.Context) error { enc := json.NewEncoder(os.Stdout) err = enc.Encode(tokenAuthentication) if err != nil { - fail("Internal error", "Failed to encode LFS json response: %v", err) + return fail("Internal error", "Failed to encode LFS json response: %v", err) } return nil } @@ -272,9 +282,9 @@ func runServ(c *cli.Context) error { var gitcmd *exec.Cmd verbs := strings.Split(verb, " ") if len(verbs) == 2 { - gitcmd = exec.Command(verbs[0], verbs[1], repoPath) + gitcmd = exec.CommandContext(ctx, verbs[0], verbs[1], repoPath) } else { - gitcmd = exec.Command(verb, repoPath) + gitcmd = exec.CommandContext(ctx, verb, repoPath) } gitcmd.Dir = setting.RepoRootPath @@ -282,13 +292,13 @@ func runServ(c *cli.Context) error { gitcmd.Stdin = os.Stdin gitcmd.Stderr = os.Stderr if err = gitcmd.Run(); err != nil { - fail("Internal error", "Failed to execute git command: %v", err) + return fail("Internal error", "Failed to execute git command: %v", err) } // Update user key activity. 
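Editor's note (the serv hunk continues below): the recurring pattern in these commands is that installSignals returns a context cancelled on SIGINT/SIGTERM, and serv now launches git through exec.CommandContext so the child process is killed when that context is cancelled. The usage of installSignals is visible in the hunks; its body is not shown in this diff, so the implementation below is only a plausible sketch of the combination.

    package main

    import (
        "context"
        "os"
        "os/exec"
        "os/signal"
        "syscall"
    )

    // installSignals mirrors the pattern used by the CLI commands: the returned
    // context is cancelled when SIGINT or SIGTERM arrives. The body here is an
    // assumption for illustration, not Gitea's actual code.
    func installSignals() (context.Context, context.CancelFunc) {
        ctx, cancel := context.WithCancel(context.Background())
        go func() {
            c := make(chan os.Signal, 1)
            signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
            select {
            case <-c:
                cancel()
            case <-ctx.Done():
            }
        }()
        return ctx, cancel
    }

    func main() {
        ctx, cancel := installSignals()
        defer cancel()

        // CommandContext kills the child when ctx is cancelled, so an
        // interrupted SSH session does not leave a git process behind.
        cmd := exec.CommandContext(ctx, "git", "upload-pack", ".")
        cmd.Stdin = os.Stdin
        cmd.Stdout = os.Stdout
        cmd.Stderr = os.Stderr
        _ = cmd.Run()
    }
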
if results.KeyID > 0 { - if err = private.UpdatePublicKeyInRepo(results.KeyID, results.RepoID); err != nil { - fail("Internal error", "UpdatePublicKeyInRepo: %v", err) + if err = private.UpdatePublicKeyInRepo(ctx, results.KeyID, results.RepoID); err != nil { + return fail("Internal error", "UpdatePublicKeyInRepo: %v", err) } } diff --git a/cmd/web.go b/cmd/web.go index 423917ba4e11..6953e7c64f5d 100644 --- a/cmd/web.go +++ b/cmd/web.go @@ -16,9 +16,8 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/routers" - "code.gitea.io/gitea/routers/routes" + "code.gitea.io/gitea/routers/install" context2 "github.com/gorilla/context" "github.com/urfave/cli" @@ -48,6 +47,14 @@ and it takes care of all the other things for you`, Value: setting.PIDFile, Usage: "Custom pid file path", }, + cli.BoolFlag{ + Name: "quiet, q", + Usage: "Only display Fatal logging errors until logging is set-up", + }, + cli.BoolFlag{ + Name: "verbose", + Usage: "Set initial logging to TRACE level until logging is properly set-up", + }, }, } @@ -72,6 +79,14 @@ func runHTTPRedirector() { } func runWeb(ctx *cli.Context) error { + if ctx.Bool("verbose") { + _ = log.DelLogger("console") + log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout)) + } else if ctx.Bool("quiet") { + _ = log.DelLogger("console") + log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "fatal", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout)) + } + managerCtx, cancel := context.WithCancel(context.Background()) graceful.InitManager(managerCtx) defer cancel() @@ -89,7 +104,7 @@ func runWeb(ctx *cli.Context) error { } // Perform pre-initialization - needsInstall := routers.PreInstallInit(graceful.GetManager().HammerContext()) + needsInstall := install.PreloadSettings(graceful.GetManager().HammerContext()) if needsInstall { // Flag for port number in case first time run conflict if ctx.IsSet("port") { @@ -102,7 +117,7 @@ func runWeb(ctx *cli.Context) error { return err } } - c := routes.InstallRoutes() + c := install.Routes() err := listen(c, false) select { case <-graceful.GetManager().IsShutdown(): @@ -135,7 +150,7 @@ func runWeb(ctx *cli.Context) error { } // Set up Chi routes - c := routes.NormalRoutes() + c := routers.NormalRoutes() err := listen(c, true) <-graceful.GetManager().Done() log.Info("PID: %d Gitea Web Finished", os.Getpid()) @@ -152,19 +167,6 @@ func setPort(port string) error { case setting.FCGI: case setting.FCGIUnix: default: - // Save LOCAL_ROOT_URL if port changed - cfg := ini.Empty() - isFile, err := util.IsFile(setting.CustomConf) - if err != nil { - log.Fatal("Unable to check if %s is a file", err) - } - if isFile { - // Keeps custom settings if there is already something. 
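Editor's note on the setPort change (the removed block continues just below): the inline "load the custom conf if present, set LOCAL_ROOT_URL, save" logic is being replaced by a call to setting.CreateOrAppendToCustomConf with a callback. The sketch below shows what such a helper could look like, reusing the same gopkg.in/ini.v1 calls as the removed code; the function name, signature, and error handling are assumptions, not Gitea's actual implementation.

    package main

    import (
        "fmt"
        "os"

        ini "gopkg.in/ini.v1"
    )

    // createOrAppendToCustomConf loads the custom config file if it already
    // exists, lets the caller mutate it, and writes it back. This mirrors the
    // inline logic removed from setPort; Gitea's real helper may differ.
    func createOrAppendToCustomConf(customConf string, callback func(cfg *ini.File)) error {
        cfg := ini.Empty()
        if _, err := os.Stat(customConf); err == nil {
            // Keep existing settings if there is already something there.
            if err := cfg.Append(customConf); err != nil {
                return fmt.Errorf("failed to load custom conf %q: %v", customConf, err)
            }
        }
        callback(cfg)
        return cfg.SaveTo(customConf)
    }

    func main() {
        err := createOrAppendToCustomConf("custom/conf/app.ini", func(cfg *ini.File) {
            cfg.Section("server").Key("LOCAL_ROOT_URL").SetValue("http://localhost:3000/")
        })
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
        }
    }
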
- if err := cfg.Append(setting.CustomConf); err != nil { - return fmt.Errorf("Failed to load custom conf '%s': %v", setting.CustomConf, err) - } - } - defaultLocalURL := string(setting.Protocol) + "://" if setting.HTTPAddr == "0.0.0.0" { defaultLocalURL += "localhost" @@ -173,10 +175,10 @@ func setPort(port string) error { } defaultLocalURL += ":" + setting.HTTPPort + "/" - cfg.Section("server").Key("LOCAL_ROOT_URL").SetValue(defaultLocalURL) - if err := cfg.SaveTo(setting.CustomConf); err != nil { - return fmt.Errorf("Error saving generated JWT Secret to custom config: %v", err) - } + // Save LOCAL_ROOT_URL if port changed + setting.CreateOrAppendToCustomConf(func(cfg *ini.File) { + cfg.Section("server").Key("LOCAL_ROOT_URL").SetValue(defaultLocalURL) + }) } return nil } diff --git a/cmd/web_letsencrypt.go b/cmd/web_letsencrypt.go index 387aacce2cbb..a68399979035 100644 --- a/cmd/web_letsencrypt.go +++ b/cmd/web_letsencrypt.go @@ -6,6 +6,7 @@ package cmd import ( "net/http" + "strconv" "strings" "code.gitea.io/gitea/modules/log" @@ -18,10 +19,19 @@ import ( func runLetsEncrypt(listenAddr, domain, directory, email string, m http.Handler) error { // If HTTP Challenge enabled, needs to be serving on port 80. For TLSALPN needs 443. - // Due to docker port mapping this can't be checked programatically + // Due to docker port mapping this can't be checked programmatically // TODO: these are placeholders until we add options for each in settings with appropriate warning enableHTTPChallenge := true enableTLSALPNChallenge := true + altHTTPPort := 0 + altTLSALPNPort := 0 + + if p, err := strconv.Atoi(setting.PortToRedirect); err == nil { + altHTTPPort = p + } + if p, err := strconv.Atoi(setting.HTTPPort); err == nil { + altTLSALPNPort = p + } magic := certmagic.NewDefault() magic.Storage = &certmagic.FileStorage{Path: directory} @@ -30,6 +40,9 @@ func runLetsEncrypt(listenAddr, domain, directory, email string, m http.Handler) Agreed: setting.LetsEncryptTOS, DisableHTTPChallenge: !enableHTTPChallenge, DisableTLSALPNChallenge: !enableTLSALPNChallenge, + ListenHost: setting.HTTPAddr, + AltTLSALPNPort: altTLSALPNPort, + AltHTTPPort: altHTTPPort, }) magic.Issuers = []certmagic.Issuer{myACME} @@ -41,6 +54,7 @@ func runLetsEncrypt(listenAddr, domain, directory, email string, m http.Handler) } tlsConfig := magic.TLSConfig() + tlsConfig.NextProtos = append(tlsConfig.NextProtos, "h2") if enableHTTPChallenge { go func() { diff --git a/contrib/environment-to-ini/environment-to-ini.go b/contrib/environment-to-ini/environment-to-ini.go index 74379e26af53..aade25190230 100644 --- a/contrib/environment-to-ini/environment-to-ini.go +++ b/contrib/environment-to-ini/environment-to-ini.go @@ -110,6 +110,8 @@ func runEnvironmentToIni(c *cli.Context) error { } cfg.NameMapper = ini.SnackCase + changed := false + prefix := c.String("prefix") + "__" for _, kv := range os.Environ() { @@ -143,15 +145,21 @@ func runEnvironmentToIni(c *cli.Context) error { continue } } + oldValue := key.Value() + if !changed && oldValue != value { + changed = true + } key.SetValue(value) } destination := c.String("out") if len(destination) == 0 { destination = setting.CustomConf } - err = cfg.SaveTo(destination) - if err != nil { - return err + if destination != setting.CustomConf || changed { + err = cfg.SaveTo(destination) + if err != nil { + return err + } } if c.Bool("clear") { for _, kv := range os.Environ() { diff --git a/contrib/pr/checkout.go b/contrib/pr/checkout.go index 9ee692fd35b1..902c9ea62347 100644 --- a/contrib/pr/checkout.go 
+++ b/contrib/pr/checkout.go @@ -26,12 +26,12 @@ import ( "time" "code.gitea.io/gitea/models" + gitea_git "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/external" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/routers" - "code.gitea.io/gitea/routers/routes" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/config" @@ -80,7 +80,7 @@ func runPR() { setting.RunUser = curUser.Username log.Printf("[PR] Loading fixtures data ...\n") - setting.CheckLFSVersion() + gitea_git.CheckLFSVersion() //models.LoadConfigs() /* setting.Database.Type = "sqlite3" @@ -116,7 +116,7 @@ func runPR() { //routers.GlobalInit() external.RegisterRenderers() markup.Init() - c := routes.NormalRoutes() + c := routers.NormalRoutes() log.Printf("[PR] Ready for testing !\n") log.Printf("[PR] Login with user1, user2, user3, ... with pass: password\n") diff --git a/contrib/systemd/gitea.service b/contrib/systemd/gitea.service index ac6a13ec573e..d6a4377ec809 100644 --- a/contrib/systemd/gitea.service +++ b/contrib/systemd/gitea.service @@ -3,14 +3,23 @@ Description=Gitea (Git with a cup of tea) After=syslog.target After=network.target ### -# Don't forget to add the database service requirements +# Don't forget to add the database service dependencies ### # -#Requires=mysql.service -#Requires=mariadb.service -#Requires=postgresql.service -#Requires=memcached.service -#Requires=redis.service +#Wants=mysql.service +#After=mysql.service +# +#Wants=mariadb.service +#After=mariadb.service +# +#Wants=postgresql.service +#After=postgresql.service +# +#Wants=memcached.service +#After=memcached.service +# +#Wants=redis.service +#After=redis.service # ### # If using socket activation for main http/s diff --git a/contrib/update_dependencies.sh b/contrib/update_dependencies.sh new file mode 100755 index 000000000000..f6f26c66cb02 --- /dev/null +++ b/contrib/update_dependencies.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +grep 'git' go.mod | grep '\.com' | grep -v indirect | grep -v replace | cut -f 2 | cut -d ' ' -f 1 | while read line; do + go get -u "$line" + make vendor + git add . + git commit -S -m "update $line" +done diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 731d14627173..be360c62bece 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -1,1314 +1,2112 @@ ; This file lists the default values used by Gitea -; Copy required sections to your own app.ini (default is custom/conf/app.ini) -; and modify as needed. -; Do not copy the whole file as-is, as it contains some invalid sections for illustrative purposes. -; If you don't know what a setting is you should not set it. - -; see https://docs.gitea.io/en-us/config-cheat-sheet/ for additional documentation. - -; App name that shows in every page title -APP_NAME = Gitea: Git with a cup of tea -; Change it if you run locally -RUN_USER = git -; Application run mode, affects performance and debugging. Either "dev", "prod" or "test", default is "prod" -RUN_MODE = prod - -[project] -; Default templates for project boards -PROJECT_BOARD_BASIC_KANBAN_TYPE = To Do, In Progress, Done -PROJECT_BOARD_BUG_TRIAGE_TYPE = Needs Triage, High Priority, Low Priority, Closed - -[repository] -; Root path for storing all repository data. It must be an absolute path. By default, it is stored in a sub-directory of `APP_DATA_PATH`. -ROOT = -; The script type this server supports. 
Usually this is `bash`, but some users report that only `sh` is available. -SCRIPT_TYPE = bash -; DETECTED_CHARSETS_ORDER tie-break order for detected charsets. -; If the charsets have equal confidence, tie-breaking will be done by order in this list -; with charsets earlier in the list chosen in preference to those later. -; Adding "defaults" will place the unused charsets at that position. -DETECTED_CHARSETS_ORDER = UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, UTF-32LE, ISO-8859, windows-1252, ISO-8859, windows-1250, ISO-8859, ISO-8859, ISO-8859, windows-1253, ISO-8859, windows-1255, ISO-8859, windows-1251, windows-1256, KOI8-R, ISO-8859, windows-1254, Shift_JIS, GB18030, EUC-JP, EUC-KR, Big5, ISO-2022, ISO-2022, ISO-2022, IBM424_rtl, IBM424_ltr, IBM420_rtl, IBM420_ltr -; Default ANSI charset to override non-UTF-8 charsets to -ANSI_CHARSET = -; Force every new repository to be private -FORCE_PRIVATE = false -; Default privacy setting when creating a new repository, allowed values: last, private, public. Default is last which means the last setting used. -DEFAULT_PRIVATE = last -; Default private when using push-to-create -DEFAULT_PUSH_CREATE_PRIVATE = true -; Global limit of repositories per user, applied at creation time. -1 means no limit -MAX_CREATION_LIMIT = -1 -; Mirror sync queue length, increase if mirror syncing starts hanging -MIRROR_QUEUE_LENGTH = 1000 -; Patch test queue length, increase if pull request patch testing starts hanging -PULL_REQUEST_QUEUE_LENGTH = 1000 -; Preferred Licenses to place at the top of the List -; The name here must match the filename in conf/license or custom/conf/license -PREFERRED_LICENSES = Apache License 2.0,MIT License -; Disable the ability to interact with repositories using the HTTP protocol -DISABLE_HTTP_GIT = false -; Value for Access-Control-Allow-Origin header, default is not to present -; WARNING: This may be harmful to your website if you do not give it a right value. -ACCESS_CONTROL_ALLOW_ORIGIN = -; Force ssh:// clone url instead of scp-style uri when default SSH port is used -USE_COMPAT_SSH_URI = false -; Close issues as long as a commit on any branch marks it as fixed -DEFAULT_CLOSE_ISSUES_VIA_COMMITS_IN_ANY_BRANCH = false -; Allow users to push local repositories to Gitea and have them automatically created for a user or an org -ENABLE_PUSH_CREATE_USER = false -ENABLE_PUSH_CREATE_ORG = false -; Comma separated list of globally disabled repo units. Allowed values: repo.issues, repo.ext_issues, repo.pulls, repo.wiki, repo.ext_wiki -DISABLED_REPO_UNITS = -; Comma separated list of default repo units. Allowed values: repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects. -; Note: Code and Releases can currently not be deactivated. If you specify default repo units you should still list them for future compatibility. -; External wiki and issue tracker can't be enabled by default as it requires additional settings. -; Disabled repo units will not be added to new repositories regardless if it is in the default list. -DEFAULT_REPO_UNITS = repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects -; Prefix archive files by placing them in a directory named after the repository -PREFIX_ARCHIVE_FILES = true -; Disable the creation of new mirrors. Pre-existing mirrors remain valid. -DISABLE_MIRRORS = false -; Disable migrating feature. -DISABLE_MIGRATIONS = false -; Disable stars feature. 
-DISABLE_STARS = false -; The default branch name of new repositories -DEFAULT_BRANCH = master -; Allow adoption of unadopted repositories -ALLOW_ADOPTION_OF_UNADOPTED_REPOSITORIES = false -; Allow deletion of unadopted repositories -ALLOW_DELETION_OF_UNADOPTED_REPOSITORIES = false - -[repository.editor] -; List of file extensions for which lines should be wrapped in the Monaco editor -; Separate extensions with a comma. To line wrap files without an extension, just put a comma -LINE_WRAP_EXTENSIONS = .txt,.md,.markdown,.mdown,.mkd, -; Valid file modes that have a preview API associated with them, such as api/v1/markdown -; Separate the values by commas. The preview tab in edit mode won't be displayed if the file extension doesn't match -PREVIEWABLE_FILE_MODES = markdown - -[repository.local] -; Path for local repository copy. Defaults to `tmp/local-repo` -LOCAL_COPY_PATH = tmp/local-repo - -[repository.upload] -; Whether repository file uploads are enabled. Defaults to `true` -ENABLED = true -; Path for uploads. Defaults to `data/tmp/uploads` (tmp gets deleted on gitea restart) -TEMP_PATH = data/tmp/uploads -; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. -ALLOWED_TYPES = -; Max size of each file in megabytes. Defaults to 3MB -FILE_MAX_SIZE = 3 -; Max number of files per upload. Defaults to 5 -MAX_FILES = 5 - -[repository.pull-request] -; List of prefixes used in Pull Request title to mark them as Work In Progress -WORK_IN_PROGRESS_PREFIXES = WIP:,[WIP] -; List of keywords used in Pull Request comments to automatically close a related issue -CLOSE_KEYWORDS = close,closes,closed,fix,fixes,fixed,resolve,resolves,resolved -; List of keywords used in Pull Request comments to automatically reopen a related issue -REOPEN_KEYWORDS = reopen,reopens,reopened -; In the default merge message for squash commits include at most this many commits -DEFAULT_MERGE_MESSAGE_COMMITS_LIMIT = 50 -; In the default merge message for squash commits limit the size of the commit messages to this -DEFAULT_MERGE_MESSAGE_SIZE = 5120 -; In the default merge message for squash commits walk all commits to include all authors in the Co-authored-by otherwise just use those in the limited list -DEFAULT_MERGE_MESSAGE_ALL_AUTHORS = false -; In default merge messages limit the number of approvers listed as Reviewed-by: to this many -DEFAULT_MERGE_MESSAGE_MAX_APPROVERS = 10 -; In default merge messages only include approvers who are official -DEFAULT_MERGE_MESSAGE_OFFICIAL_APPROVERS_ONLY = true - -[repository.issue] -; List of reasons why a Pull Request or Issue can be locked -LOCK_REASONS = Too heated,Off-topic,Resolved,Spam - -[repository.release] -; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. -ALLOWED_TYPES = - -[repository.signing] -; GPG key to use to sign commits, Defaults to the default - that is the value of git config --get user.signingkey -; run in the context of the RUN_USER -; Switch to none to stop signing completely -SIGNING_KEY = default -; If a SIGNING_KEY ID is provided and is not set to default, use the provided Name and Email address as the signer. -; These should match a publicized name and email address for the key. 
(When SIGNING_KEY is default these are set to -; the results of git config --get user.name and git config --get user.email respectively and can only be overridden -; by setting the SIGNING_KEY ID to the correct ID.) -SIGNING_NAME = -SIGNING_EMAIL = -; Sets the default trust model for repositories. Options are: collaborator, committer, collaboratorcommitter -DEFAULT_TRUST_MODEL = collaborator -; Determines when gitea should sign the initial commit when creating a repository -; Either: -; - never -; - pubkey: only sign if the user has a pubkey -; - twofa: only sign if the user has logged in with twofa -; - always -; options other than none and always can be combined as comma separated list -INITIAL_COMMIT = always -; Determines when to sign for CRUD actions -; - as above -; - parentsigned: requires that the parent commit is signed. -CRUD_ACTIONS = pubkey, twofa, parentsigned -; Determines when to sign Wiki commits -; - as above -WIKI = never -; Determines when to sign on merges -; - basesigned: require that the parent of commit on the base repo is signed. -; - commitssigned: require that all the commits in the head branch are signed. -; - approved: only sign when merging an approved pr to a protected branch -MERGES = pubkey, twofa, basesigned, commitssigned - -[cors] -; More information about CORS can be found here: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#The_HTTP_response_headers -; enable cors headers (disabled by default) -ENABLED = false -; scheme of allowed requests -SCHEME = http -; list of requesting domains that are allowed -ALLOW_DOMAIN = * -; allow subdomains of headers listed above to request -ALLOW_SUBDOMAIN = false -; list of methods allowed to request -METHODS = GET,HEAD,POST,PUT,PATCH,DELETE,OPTIONS -; max time to cache response -MAX_AGE = 10m -; allow request with credentials -ALLOW_CREDENTIALS = false - -[ui] -; Number of repositories that are displayed on one explore page -EXPLORE_PAGING_NUM = 20 -; Number of issues that are displayed on one page -ISSUE_PAGING_NUM = 10 -; Number of maximum commits displayed in one activity feed -FEED_MAX_COMMIT_NUM = 5 -; Number of items that are displayed in home feed -FEED_PAGING_NUM = 20 -; Number of maximum commits displayed in commit graph. -GRAPH_MAX_COMMIT_NUM = 100 -; Number of line of codes shown for a code comment -CODE_COMMENT_LINES = 4 -; Value of `theme-color` meta tag, used by Android >= 5.0 -; An invalid color like "none" or "disable" will have the default style -; More info: https://developers.google.com/web/updates/2014/11/Support-for-theme-color-in-Chrome-39-for-Android -THEME_COLOR_META_TAG = `#6cc644` -; Max size of files to be displayed (default is 8MiB) -MAX_DISPLAY_FILE_SIZE = 8388608 -; Whether the email of the user should be shown in the Explore Users page -SHOW_USER_EMAIL = true -; Set the default theme for the Gitea install -DEFAULT_THEME = gitea -; All available themes. Allow users select personalized themes regardless of the value of `DEFAULT_THEME`. -THEMES = gitea,arc-green -;All available reactions users can choose on issues/prs and comments. -;Values can be emoji alias (:smile:) or a unicode emoji. -;For custom reactions, add a tightly cropped square image to public/emoji/img/reaction_name.png -REACTIONS = +1, -1, laugh, hooray, confused, heart, rocket, eyes -; Whether the full name of the users should be shown where possible. If the full name isn't set, the username will be used. -DEFAULT_SHOW_FULL_NAME = false -; Whether to search within description at repository search on explore page. 
-SEARCH_REPO_DESCRIPTION = true -; Whether to enable a Service Worker to cache frontend assets -USE_SERVICE_WORKER = true - -[ui.admin] -; Number of users that are displayed on one page -USER_PAGING_NUM = 50 -; Number of repos that are displayed on one page -REPO_PAGING_NUM = 50 -; Number of notices that are displayed on one page -NOTICE_PAGING_NUM = 25 -; Number of organizations that are displayed on one page -ORG_PAGING_NUM = 50 - -[ui.user] -; Number of repos that are displayed on one page -REPO_PAGING_NUM = 15 - -[ui.meta] -AUTHOR = Gitea - Git with a cup of tea -DESCRIPTION = Gitea (Git with a cup of tea) is a painless self-hosted Git service written in Go -KEYWORDS = go,git,self-hosted,gitea - -[ui.notification] -; Control how often the notification endpoint is polled to update the notification -; The timeout will increase to MAX_TIMEOUT in TIMEOUT_STEPs if the notification count is unchanged -; Set MIN_TIMEOUT to 0 to turn off -MIN_TIMEOUT = 10s -MAX_TIMEOUT = 60s -TIMEOUT_STEP = 10s -; This setting determines how often the db is queried to get the latest notification counts. -; If the browser client supports EventSource and SharedWorker, a SharedWorker will be used in preference to polling notification. Set to -1 to disable the EventSource -EVENT_SOURCE_UPDATE_TIME = 10s - -[ui.svg] -; Whether to render SVG files as images. If SVG rendering is disabled, SVG files are displayed as text and cannot be embedded in markdown files as images. -ENABLE_RENDER = true - -[ui.csv] -; Maximum allowed file size in bytes to render CSV files as table. (Set to 0 for no limit). -MAX_FILE_SIZE = 524288 - -[markdown] -; Render soft line breaks as hard line breaks, which means a single newline character between -; paragraphs will cause a line break and adding trailing whitespace to paragraphs is not -; necessary to force a line break. -; Render soft line breaks as hard line breaks for comments -ENABLE_HARD_LINE_BREAK_IN_COMMENTS = true -; Render soft line breaks as hard line breaks for markdown documents -ENABLE_HARD_LINE_BREAK_IN_DOCUMENTS = false -; Comma separated list of custom URL-Schemes that are allowed as links when rendering Markdown -; for example git,magnet,ftp (more at https://en.wikipedia.org/wiki/List_of_URI_schemes) -; URLs starting with http and https are always displayed, whatever is put in this entry. -CUSTOM_URL_SCHEMES = -; List of file extensions that should be rendered/edited as Markdown -; Separate the extensions with a comma. To render files without any extension as markdown, just put a comma -FILE_EXTENSIONS = .md,.markdown,.mdown,.mkd - +;; Copy required sections to your own app.ini (default is custom/conf/app.ini) +;; and modify as needed. +;; Do not copy the whole file as-is, as it contains some invalid sections for illustrative purposes. +;; If you don't know what a setting is you should not set it. +;; +;; see https://docs.gitea.io/en-us/config-cheat-sheet/ for additional documentation. + + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; General Settings +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; App name that shows in every page title +APP_NAME = ; Gitea: Git with a cup of tea +;; +;; RUN_USER will automatically detect the current user - but you can set it here change it if you run locally +RUN_USER = ; git +;; +;; Application run mode, affects performance and debugging. 
Either "dev", "prod" or "test", default is "prod" +RUN_MODE = ; prod + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; [server] -; The protocol the server listens on. One of 'http', 'https', 'unix' or 'fcgi'. -PROTOCOL = http -DOMAIN = localhost -ROOT_URL = %(PROTOCOL)s://%(DOMAIN)s:%(HTTP_PORT)s/ -; when STATIC_URL_PREFIX is empty it will follow ROOT_URL -STATIC_URL_PREFIX = -; The address to listen on. Either a IPv4/IPv6 address or the path to a unix socket. -HTTP_ADDR = 0.0.0.0 -; The port to listen on. Leave empty when using a unix socket. -HTTP_PORT = 3000 -; If REDIRECT_OTHER_PORT is true, and PROTOCOL is set to https an http server -; will be started on PORT_TO_REDIRECT and it will redirect plain, non-secure http requests to the main -; ROOT_URL. Defaults are false for REDIRECT_OTHER_PORT and 80 for -; PORT_TO_REDIRECT. -REDIRECT_OTHER_PORT = false -PORT_TO_REDIRECT = 80 -; Permission for unix socket -UNIX_SOCKET_PERMISSION = 666 -; Local (DMZ) URL for Gitea workers (such as SSH update) accessing web service. -; In most cases you do not need to change the default value. -; Alter it only if your SSH server node is not the same as HTTP node. -; Do not set this variable if PROTOCOL is set to 'unix'. -LOCAL_ROOT_URL = %(PROTOCOL)s://%(HTTP_ADDR)s:%(HTTP_PORT)s/ -; Disable SSH feature when not available -DISABLE_SSH = false -; Whether to use the builtin SSH server or not. -START_SSH_SERVER = false -; Username to use for the builtin SSH server. If blank, then it is the value of RUN_USER. -BUILTIN_SSH_SERVER_USER = -; Domain name to be exposed in clone URL -SSH_DOMAIN = %(DOMAIN)s -; The network interface the builtin SSH server should listen on -SSH_LISTEN_HOST = -; Port number to be exposed in clone URL -SSH_PORT = 22 -; The port number the builtin SSH server should listen on -SSH_LISTEN_PORT = %(SSH_PORT)s -; Root path of SSH directory, default is '~/.ssh', but you have to use '/home/git/.ssh'. -SSH_ROOT_PATH = -; Gitea will create a authorized_keys file by default when it is not using the internal ssh server -; If you intend to use the AuthorizedKeysCommand functionality then you should turn this off. -SSH_CREATE_AUTHORIZED_KEYS_FILE = true -; Gitea will create a authorized_principals file by default when it is not using the internal ssh server -; If you intend to use the AuthorizedPrincipalsCommand functionality then you should turn this off. 
-SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE = true -; For the built-in SSH server, choose the ciphers to support for SSH connections, -; for system SSH this setting has no effect -SSH_SERVER_CIPHERS = aes128-ctr, aes192-ctr, aes256-ctr, aes128-gcm@openssh.com, arcfour256, arcfour128 -; For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, -; for system SSH this setting has no effect -SSH_SERVER_KEY_EXCHANGES = diffie-hellman-group1-sha1, diffie-hellman-group14-sha1, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, curve25519-sha256@libssh.org -; For the built-in SSH server, choose the MACs to support for SSH connections, -; for system SSH this setting has no effect -SSH_SERVER_MACS = hmac-sha2-256-etm@openssh.com, hmac-sha2-256, hmac-sha1, hmac-sha1-96 -; For the built-in SSH server, choose the keypair to offer as the host key -; The private key should be at SSH_SERVER_HOST_KEY and the public SSH_SERVER_HOST_KEY.pub -; relative paths are made absolute relative to the APP_DATA_PATH -SSH_SERVER_HOST_KEYS=ssh/gitea.rsa, ssh/gogs.rsa -; Directory to create temporary files in when testing public keys using ssh-keygen, -; default is the system temporary directory. -SSH_KEY_TEST_PATH = -; Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call. -SSH_KEYGEN_PATH = ssh-keygen -; Enable SSH Authorized Key Backup when rewriting all keys, default is true -SSH_AUTHORIZED_KEYS_BACKUP = true -; Determines which principals to allow -; - empty: if SSH_TRUSTED_USER_CA_KEYS is empty this will default to off, otherwise will default to email, username. -; - off: Do not allow authorized principals -; - email: the principal must match the user's email -; - username: the principal must match the user's username -; - anything: there will be no checking on the content of the principal -SSH_AUTHORIZED_PRINCIPALS_ALLOW = email, username -; Enable SSH Authorized Principals Backup when rewriting all keys, default is true -SSH_AUTHORIZED_PRINCIPALS_BACKUP = true -; Specifies the public keys of certificate authorities that are trusted to sign user certificates for authentication. -; Multiple keys should be comma separated. -; E.g."ssh- ". or "ssh- , ssh- ". -; For more information see "TrustedUserCAKeys" in the sshd config manpages. -SSH_TRUSTED_USER_CA_KEYS = -; Absolute path of the `TrustedUserCaKeys` file gitea will manage. -; Default this `RUN_USER`/.ssh/gitea-trusted-user-ca-keys.pem -; If you're running your own ssh server and you want to use the gitea managed file you'll also need to modify your -; sshd_config to point to this file. The official docker image will automatically work without further configuration. -SSH_TRUSTED_USER_CA_KEYS_FILENAME = -; Enable exposure of SSH clone URL to anonymous visitors, default is false -SSH_EXPOSE_ANONYMOUS = false -; Indicate whether to check minimum key size with corresponding type -MINIMUM_KEY_SIZE_CHECK = false -; Disable CDN even in "prod" mode -OFFLINE_MODE = false -DISABLE_ROUTER_LOG = false -; Generate steps: -; $ ./gitea cert -ca=true -duration=8760h0m0s -host=myhost.example.com -; -; Or from a .pfx file exported from the Windows certificate store (do -; not forget to export the private key): -; $ openssl pkcs12 -in cert.pfx -out cert.pem -nokeys -; $ openssl pkcs12 -in cert.pfx -out key.pem -nocerts -nodes -; Paths are relative to CUSTOM_PATH -CERT_FILE = https/cert.pem -KEY_FILE = https/key.pem -; Root directory containing templates and static files. 
-; default is the path where Gitea is executed -STATIC_ROOT_PATH = -; Default path for App data -APP_DATA_PATH = data -; Enable gzip compression for runtime-generated content, static resources excluded -ENABLE_GZIP = false -; Application profiling (memory and cpu) -; For "web" command it listens on localhost:6060 -; For "serve" command it dumps to disk at PPROF_DATA_PATH as (cpuprofile|memprofile)__ -ENABLE_PPROF = false -; PPROF_DATA_PATH, use an absolute path when you start gitea as service -PPROF_DATA_PATH = data/tmp/pprof -; Landing page, can be "home", "explore", "organizations" or "login" -; The "login" choice is not a security measure but just a UI flow change, use REQUIRE_SIGNIN_VIEW to force users to log in. -LANDING_PAGE = home -; Enables git-lfs support. true or false, default is false. -LFS_START_SERVER = false -; Where your lfs files reside, default is data/lfs. -LFS_CONTENT_PATH = data/lfs -; LFS authentication secret, change this yourself +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; The protocol the server listens on. One of 'http', 'https', 'unix' or 'fcgi'. Defaults to 'http' +;PROTOCOL = http +;; +;; Set the domain for the server +;DOMAIN = localhost +;; +;; Overwrite the automatically generated public URL. Necessary for proxies and docker. +;ROOT_URL = %(PROTOCOL)s://%(DOMAIN)s:%(HTTP_PORT)s/ +;; +;; when STATIC_URL_PREFIX is empty it will follow ROOT_URL +;STATIC_URL_PREFIX = +;; +;; The address to listen on. Either a IPv4/IPv6 address or the path to a unix socket. +;HTTP_ADDR = 0.0.0.0 +;; +;; The port to listen on. Leave empty when using a unix socket. +;HTTP_PORT = 3000 +;; +;; If REDIRECT_OTHER_PORT is true, and PROTOCOL is set to https an http server +;; will be started on PORT_TO_REDIRECT and it will redirect plain, non-secure http requests to the main +;; ROOT_URL. Defaults are false for REDIRECT_OTHER_PORT and 80 for +;; PORT_TO_REDIRECT. +;REDIRECT_OTHER_PORT = false +;PORT_TO_REDIRECT = 80 +;; +;; Timeout for any write to the connection. (Set to 0 to disable all timeouts.) +;PER_WRITE_TIMEOUT = 30s +;; +;; Timeout per Kb written to connections. +;PER_WRITE_PER_KB_TIMEOUT = 30s +;; +;; Permission for unix socket +;UNIX_SOCKET_PERMISSION = 666 +;; +;; Local (DMZ) URL for Gitea workers (such as SSH update) accessing web service. +;; In most cases you do not need to change the default value. +;; Alter it only if your SSH server node is not the same as HTTP node. +;; Do not set this variable if PROTOCOL is set to 'unix'. +;LOCAL_ROOT_URL = %(PROTOCOL)s://%(HTTP_ADDR)s:%(HTTP_PORT)s/ +;; +;; Disable SSH feature when not available +;DISABLE_SSH = false +;; +;; Whether to use the builtin SSH server or not. +;START_SSH_SERVER = false +;; +;; Username to use for the builtin SSH server. If blank, then it is the value of RUN_USER. +;BUILTIN_SSH_SERVER_USER = +;; +;; Domain name to be exposed in clone URL +;SSH_DOMAIN = %(DOMAIN)s +;; +;; The network interface the builtin SSH server should listen on +;SSH_LISTEN_HOST = +;; +;; Port number to be exposed in clone URL +;SSH_PORT = 22 +;; +;; The port number the builtin SSH server should listen on +;SSH_LISTEN_PORT = %(SSH_PORT)s +;; +;; Root path of SSH directory, default is '~/.ssh', but you have to use '/home/git/.ssh'. +;SSH_ROOT_PATH = +;; +;; Gitea will create a authorized_keys file by default when it is not using the internal ssh server +;; If you intend to use the AuthorizedKeysCommand functionality then you should turn this off. 
+;SSH_CREATE_AUTHORIZED_KEYS_FILE = true +;; +;; Gitea will create a authorized_principals file by default when it is not using the internal ssh server +;; If you intend to use the AuthorizedPrincipalsCommand functionality then you should turn this off. +;SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE = true +;; +;; For the built-in SSH server, choose the ciphers to support for SSH connections, +;; for system SSH this setting has no effect +;SSH_SERVER_CIPHERS = aes128-ctr, aes192-ctr, aes256-ctr, aes128-gcm@openssh.com, arcfour256, arcfour128 +;; +;; For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, +;; for system SSH this setting has no effect +;SSH_SERVER_KEY_EXCHANGES = diffie-hellman-group1-sha1, diffie-hellman-group14-sha1, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, curve25519-sha256@libssh.org +;; +;; For the built-in SSH server, choose the MACs to support for SSH connections, +;; for system SSH this setting has no effect +;SSH_SERVER_MACS = hmac-sha2-256-etm@openssh.com, hmac-sha2-256, hmac-sha1, hmac-sha1-96 +;; +;; For the built-in SSH server, choose the keypair to offer as the host key +;; The private key should be at SSH_SERVER_HOST_KEY and the public SSH_SERVER_HOST_KEY.pub +;; relative paths are made absolute relative to the APP_DATA_PATH +;SSH_SERVER_HOST_KEYS=ssh/gitea.rsa, ssh/gogs.rsa +;; +;; Directory to create temporary files in when testing public keys using ssh-keygen, +;; default is the system temporary directory. +;SSH_KEY_TEST_PATH = +;; +;; Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call. +;SSH_KEYGEN_PATH = ssh-keygen +;; +;; Enable SSH Authorized Key Backup when rewriting all keys, default is true +;SSH_AUTHORIZED_KEYS_BACKUP = true +;; +;; Determines which principals to allow +;; - empty: if SSH_TRUSTED_USER_CA_KEYS is empty this will default to off, otherwise will default to email, username. +;; - off: Do not allow authorized principals +;; - email: the principal must match the user's email +;; - username: the principal must match the user's username +;; - anything: there will be no checking on the content of the principal +;SSH_AUTHORIZED_PRINCIPALS_ALLOW = email, username +;; +;; Enable SSH Authorized Principals Backup when rewriting all keys, default is true +;SSH_AUTHORIZED_PRINCIPALS_BACKUP = true +;; +;; Specifies the public keys of certificate authorities that are trusted to sign user certificates for authentication. +;; Multiple keys should be comma separated. +;; E.g."ssh- ". or "ssh- , ssh- ". +;; For more information see "TrustedUserCAKeys" in the sshd config manpages. +;SSH_TRUSTED_USER_CA_KEYS = +;; Absolute path of the `TrustedUserCaKeys` file gitea will manage. +;; Default this `RUN_USER`/.ssh/gitea-trusted-user-ca-keys.pem +;; If you're running your own ssh server and you want to use the gitea managed file you'll also need to modify your +;; sshd_config to point to this file. The official docker image will automatically work without further configuration. +;SSH_TRUSTED_USER_CA_KEYS_FILENAME = +;; +;; Enable exposure of SSH clone URL to anonymous visitors, default is false +;SSH_EXPOSE_ANONYMOUS = false +;; +;; Timeout for any write to ssh connections. (Set to 0 to disable all timeouts.) +;; Will default to the PER_WRITE_TIMEOUT. +;SSH_PER_WRITE_TIMEOUT = 30s +;; +;; Timeout per Kb written to ssh connections. +;; Will default to the PER_WRITE_PER_KB_TIMEOUT. 
+;SSH_PER_WRITE_PER_KB_TIMEOUT = 30s +;; +;; Indicate whether to check minimum key size with corresponding type +;MINIMUM_KEY_SIZE_CHECK = false +;; +;; Disable CDN even in "prod" mode +;OFFLINE_MODE = false +;DISABLE_ROUTER_LOG = false +;; +;; Generate steps: +;; $ ./gitea cert -ca=true -duration=8760h0m0s -host=myhost.example.com +;; +;; Or from a .pfx file exported from the Windows certificate store (do +;; not forget to export the private key): +;; $ openssl pkcs12 -in cert.pfx -out cert.pem -nokeys +;; $ openssl pkcs12 -in cert.pfx -out key.pem -nocerts -nodes +;; Paths are relative to CUSTOM_PATH +;CERT_FILE = https/cert.pem +;KEY_FILE = https/key.pem +;; +;; Root directory containing templates and static files. +;; default is the path where Gitea is executed +;STATIC_ROOT_PATH = +;; +;; Default path for App data +;APP_DATA_PATH = data +;; +;; Enable gzip compression for runtime-generated content, static resources excluded +;ENABLE_GZIP = false +;; +;; Application profiling (memory and cpu) +;; For "web" command it listens on localhost:6060 +;; For "serve" command it dumps to disk at PPROF_DATA_PATH as (cpuprofile|memprofile)__ +;ENABLE_PPROF = false +;; +;; PPROF_DATA_PATH, use an absolute path when you start gitea as service +;PPROF_DATA_PATH = data/tmp/pprof +;; +;; Landing page, can be "home", "explore", "organizations" or "login" +;; The "login" choice is not a security measure but just a UI flow change, use REQUIRE_SIGNIN_VIEW to force users to log in. +;LANDING_PAGE = home +;; +;; Enables git-lfs support. true or false, default is false. +;LFS_START_SERVER = false +;; +;; Where your lfs files reside, default is data/lfs. +;LFS_CONTENT_PATH = data/lfs +;; +;; LFS authentication secret, change this yourself LFS_JWT_SECRET = -; LFS authentication validity period (in time.Duration), pushes taking longer than this may fail. -LFS_HTTP_AUTH_EXPIRY = 20m -; Maximum allowed LFS file size in bytes (Set to 0 for no limit). -LFS_MAX_FILE_SIZE = 0 -; Maximum number of locks returned per page -LFS_LOCKS_PAGING_NUM = 50 -; Allow graceful restarts using SIGHUP to fork -ALLOW_GRACEFUL_RESTARTS = true -; After a restart the parent will finish ongoing requests before -; shutting down. Force shutdown if this process takes longer than this delay. -; set to a negative value to disable -GRACEFUL_HAMMER_TIME = 60s -; Allows the setting of a startup timeout and waithint for Windows as SVC service -; 0 disables this. -STARTUP_TIMEOUT = 0 -; Static resources, includes resources on custom/, public/ and all uploaded avatars web browser cache time. Note that this cache is disabled when RUN_MODE is "dev". Default is 6h -STATIC_CACHE_TIME = 6h - -; Define allowed algorithms and their minimum key length (use -1 to disable a type) -[ssh.minimum_key_sizes] -ED25519 = 256 -ECDSA = 256 -RSA = 2048 -DSA = -1 ; set to 1024 to switch on - +;; +;; LFS authentication validity period (in time.Duration), pushes taking longer than this may fail. +;LFS_HTTP_AUTH_EXPIRY = 20m +;; +;; Maximum allowed LFS file size in bytes (Set to 0 for no limit). +;LFS_MAX_FILE_SIZE = 0 +;; +;; Maximum number of locks returned per page +;LFS_LOCKS_PAGING_NUM = 50 +;; +;; Allow graceful restarts using SIGHUP to fork +;ALLOW_GRACEFUL_RESTARTS = true +;; +;; After a restart the parent will finish ongoing requests before +;; shutting down. Force shutdown if this process takes longer than this delay. 
+;; set to a negative value to disable +;GRACEFUL_HAMMER_TIME = 60s +;; +;; Allows the setting of a startup timeout and waithint for Windows as SVC service +;; 0 disables this. +;STARTUP_TIMEOUT = 0 +;; +;; Static resources, includes resources on custom/, public/ and all uploaded avatars web browser cache time. Note that this cache is disabled when RUN_MODE is "dev". Default is 6h +;STATIC_CACHE_TIME = 6h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; [database] -; Database to use. Either "mysql", "postgres", "mssql" or "sqlite3". +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Database to use. Either "mysql", "postgres", "mssql" or "sqlite3". +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; MySQL Configuration +;; DB_TYPE = mysql -HOST = 127.0.0.1:3306 +HOST = 127.0.0.1:3306 ; can use socket e.g. /var/run/mysqld/mysqld.sock NAME = gitea USER = root -; Use PASSWD = `your password` for quoting if you use special characters in the password. -PASSWD = -; For Postgres, schema to use if different from "public". The schema must exist beforehand, -; the user must have creation privileges on it, and the user search path must be set -; to the look into the schema first. e.g.:ALTER USER user SET SEARCH_PATH = schema_name,"$user",public; -SCHEMA = -; For Postgres, either "disable" (default), "require", or "verify-full" -; For MySQL, either "false" (default), "true", or "skip-verify" -SSL_MODE = disable -; For MySQL only, either "utf8" or "utf8mb4", default is "utf8mb4". -; NOTICE: for "utf8mb4" you must use MySQL InnoDB > 5.6. Gitea is unable to check this. -CHARSET = utf8mb4 -; For "sqlite3" and "tidb", use an absolute path when you start gitea as service -PATH = data/gitea.db -; For "sqlite3" only. Query timeout -SQLITE_TIMEOUT = 500 -; For iterate buffer, default is 50 -ITERATE_BUFFER_SIZE = 50 -; Show the database generated SQL -LOG_SQL = true -; Maximum number of DB Connect retries -DB_RETRIES = 10 -; Backoff time per DB retry (time.Duration) -DB_RETRY_BACKOFF = 3s -; Max idle database connections on connection pool, default is 2 -MAX_IDLE_CONNS = 2 -; Database connection max life time, default is 0 or 3s mysql (See #6804 & #7071 for reasoning) -CONN_MAX_LIFETIME = 3s -; Database maximum number of open connections, default is 0 meaning no maximum -MAX_OPEN_CONNS = 0 - -[indexer] -; Issue indexer type, currently support: bleve, db or elasticsearch, default is bleve -ISSUE_INDEXER_TYPE = bleve -; Issue indexer connection string, available when ISSUE_INDEXER_TYPE is elasticsearch -ISSUE_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200 -; Issue indexer name, available when ISSUE_INDEXER_TYPE is elasticsearch -ISSUE_INDEXER_NAME = gitea_issues -; Issue indexer storage path, available when ISSUE_INDEXER_TYPE is bleve -ISSUE_INDEXER_PATH = indexers/issues.bleve -; Issue indexer queue, currently support: channel, levelqueue or redis, default is levelqueue -ISSUE_INDEXER_QUEUE_TYPE = levelqueue -; When ISSUE_INDEXER_QUEUE_TYPE is levelqueue, this will be the path where the queue will be saved. -; This can be overridden by `ISSUE_INDEXER_QUEUE_CONN_STR`. -; default is indexers/issues.queue -ISSUE_INDEXER_QUEUE_DIR = indexers/issues.queue -; When `ISSUE_INDEXER_QUEUE_TYPE` is `redis`, this will store the redis connection string. 
-; When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this is a directory or additional options of -; the form `leveldb://path/to/db?option=value&....`, and overrides `ISSUE_INDEXER_QUEUE_DIR`. -ISSUE_INDEXER_QUEUE_CONN_STR = "addrs=127.0.0.1:6379 db=0" -; Batch queue number, default is 20 -ISSUE_INDEXER_QUEUE_BATCH_NUMBER = 20 -; Timeout the indexer if it takes longer than this to start. -; Set to zero to disable timeout. -STARTUP_TIMEOUT = 30s - -; repo indexer by default disabled, since it uses a lot of disk space -REPO_INDEXER_ENABLED = false -; Code search engine type, could be `bleve` or `elasticsearch`. -REPO_INDEXER_TYPE = bleve -; Index file used for code search. -REPO_INDEXER_PATH = indexers/repos.bleve -; Code indexer connection string, available when `REPO_INDEXER_TYPE` is elasticsearch. i.e. http://elastic:changeme@localhost:9200 -REPO_INDEXER_CONN_STR = -; Code indexer name, available when `REPO_INDEXER_TYPE` is elasticsearch -REPO_INDEXER_NAME = gitea_codes - -UPDATE_BUFFER_LEN = 20 -MAX_FILE_SIZE = 1048576 -; A comma separated list of glob patterns (see https://github.com/gobwas/glob) to include -; in the index; default is empty -REPO_INDEXER_INCLUDE = -; A comma separated list of glob patterns to exclude from the index; ; default is empty -REPO_INDEXER_EXCLUDE = - -[queue] -; Specific queues can be individually configured with [queue.name]. [queue] provides defaults -; -; General queue queue type, currently support: persistable-channel, channel, level, redis, dummy -; default to persistable-channel -TYPE = persistable-channel -; data-dir for storing persistable queues and level queues, individual queues will be named by their type -DATADIR = queues/ -; Default queue length before a channel queue will block -LENGTH = 20 -; Batch size to send for batched queues -BATCH_LENGTH = 20 -; Connection string for redis queues this will store the redis connection string. -; When `TYPE` is `persistable-channel`, this provides a directory for the underlying leveldb -; or additional options of the form `leveldb://path/to/db?option=value&....`, and will override `DATADIR`. -CONN_STR = "addrs=127.0.0.1:6379 db=0" -; Provides the suffix of the default redis/disk queue name - specific queues can be overridden within in their [queue.name] sections. -QUEUE_NAME = "_queue" -; Provides the suffix of the default redis/disk unique queue set name - specific queues can be overridden within in their [queue.name] sections. -SET_NAME = "_unique" -; If the queue cannot be created at startup - level queues may need a timeout at startup - wrap the queue: -WRAP_IF_NECESSARY = true -; Attempt to create the wrapped queue at max -MAX_ATTEMPTS = 10 -; Timeout queue creation -TIMEOUT = 15m30s -; Create a pool with this many workers -WORKERS = 1 -; Dynamically scale the worker pool to at this many workers -MAX_WORKERS = 10 -; Add boost workers when the queue blocks for BLOCK_TIMEOUT -BLOCK_TIMEOUT = 1s -; Remove the boost workers after BOOST_TIMEOUT -BOOST_TIMEOUT = 5m -; During a boost add BOOST_WORKERS -BOOST_WORKERS = 5 - -[admin] -; Disallow regular (non-admin) users from creating organizations. -DISABLE_REGULAR_ORG_CREATION = false -; Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled -DEFAULT_EMAIL_NOTIFICATIONS = enabled - +;PASSWD = ;Use PASSWD = `your password` for quoting if you use special characters in the password. 
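To make the reworked [database] layout concrete: only one driver sub-block is meant to be uncommented at a time. A minimal sketch for MySQL over a local socket, using placeholder credentials and the socket path mentioned in the HOST comment (none of these values are set by this change), might look like:

    [database]
    DB_TYPE = mysql
    ; connect through the local socket instead of TCP, as the HOST comment suggests
    HOST    = /var/run/mysqld/mysqld.sock
    NAME    = gitea
    USER    = gitea
    ; back-tick quoting is only needed when the password contains special characters
    PASSWD  = `s0me;complex#password`
    CHARSET = utf8mb4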
+;SSL_MODE = false ; either "false" (default), "true", or "skip-verify" +;CHARSET = utf8mb4 ;either "utf8" or "utf8mb4", default is "utf8mb4". +;; +;; NOTICE: for "utf8mb4" you must use MySQL InnoDB > 5.6. Gitea is unable to check this. +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Postgres Configuration +;; +;DB_TYPE = postgres +;HOST = 127.0.0.1:5432 ; can use socket e.g. /var/run/postgresql/ +;NAME = gitea +;USER = root +;PASSWD = +;SCHEMA = +;SSL_MODE=disable ;either "disable" (default), "require", or "verify-full" +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; SQLite Configuration +;; +;DB_TYPE = sqlite3 +;PATH= ; defaults to data/gitea.db +;SQLITE_TIMEOUT = ; Query timeout defaults to: 500 +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; MSSQL Configuration +;; +;DB_TYPE = mssql +;HOST = 172.17.0.2:1433 +;NAME = gitea +;USER = SA +;PASSWD = MwantsaSecurePassword1 +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Other settings +;; +;; For iterate buffer, default is 50 +;ITERATE_BUFFER_SIZE = 50 +;; +;; Show the database generated SQL +LOG_SQL = false ; if unset defaults to true +;; +;; Maximum number of DB Connect retries +;DB_RETRIES = 10 +;; +;; Backoff time per DB retry (time.Duration) +;DB_RETRY_BACKOFF = 3s +;; +;; Max idle database connections on connection pool, default is 2 +;MAX_IDLE_CONNS = 2 +;; +;; Database connection max life time, default is 0 or 3s mysql (See #6804 & #7071 for reasoning) +;CONN_MAX_LIFETIME = 3s +;; +;; Database maximum number of open connections, default is 0 meaning no maximum +;MAX_OPEN_CONNS = 0 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; [security] -; Whether the installer is disabled +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Whether the installer is disabled (set to true to disable the installer) INSTALL_LOCK = false -; !!CHANGE THIS TO KEEP YOUR USER DATA SAFE!! -SECRET_KEY = !#@FDEWREWR&*( -; How long to remember that a user is logged in before requiring relogin (in days) -LOGIN_REMEMBER_DAYS = 7 -COOKIE_USERNAME = gitea_awesome -COOKIE_REMEMBER_NAME = gitea_incredible -; Reverse proxy authentication header name of user name -REVERSE_PROXY_AUTHENTICATION_USER = X-WEBAUTH-USER -REVERSE_PROXY_AUTHENTICATION_EMAIL = X-WEBAUTH-EMAIL -; Interpret X-Forwarded-For header or the X-Real-IP header and set this as the remote IP for the request -REVERSE_PROXY_LIMIT = 1 -; List of IP addresses and networks separated by comma of trusted proxy servers. Use `*` to trust all. -REVERSE_PROXY_TRUSTED_PROXIES = 127.0.0.0/8,::1/128 -; The minimum password length for new Users -MIN_PASSWORD_LENGTH = 6 -; Set to true to allow users to import local server paths -IMPORT_LOCAL_PATHS = false -; Set to false to allow users with git hook privileges to create custom git hooks. -; Custom git hooks can be used to perform arbitrary code execution on the host operating system. -; This enables the users to access and modify this config file and the Gitea database and interrupt the Gitea service. -; By modifying the Gitea database, users can gain Gitea administrator privileges. -; It also enables them to access other resources available to the user on the operating system that is running the Gitea instance and perform arbitrary actions in the name of the Gitea OS user. -; WARNING: This maybe harmful to you website or your operating system. -DISABLE_GIT_HOOKS = true -; Set to true to disable webhooks feature. 
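The replacement [security] block that follows drops the old hard-coded SECRET_KEY example in favour of blank values that Gitea regenerates on its own. If you prefer to pin them yourself, a hedged sketch is below; the `gitea generate secret` subcommand comes from Gitea's command-line documentation rather than from this hunk, and the token file path is only an example:

    [security]
    INSTALL_LOCK = true
    ; paste the output of: gitea generate secret SECRET_KEY
    SECRET_KEY =
    ; keep the internal token out of the config file entirely (path is just an example)
    INTERNAL_TOKEN_URI = file:/etc/gitea/internal_token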
-DISABLE_WEBHOOKS = false -; Set to false to allow pushes to gitea repositories despite having an incomplete environment - NOT RECOMMENDED -ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET = true -;Comma separated list of character classes required to pass minimum complexity. -;If left empty or no valid values are specified, the default is off (no checking) -;Classes include "lower,upper,digit,spec" -PASSWORD_COMPLEXITY = off -; Password Hash algorithm, either "argon2", "pbkdf2", "scrypt" or "bcrypt" -PASSWORD_HASH_ALGO = pbkdf2 -; Set false to allow JavaScript to read CSRF cookie -CSRF_COOKIE_HTTP_ONLY = true -; Validate against https://haveibeenpwned.com/Passwords to see if a password has been exposed -PASSWORD_CHECK_PWN = false - -[openid] -; -; OpenID is an open, standard and decentralized authentication protocol. -; Your identity is the address of a webpage you provide, which describes -; how to prove you are in control of that page. -; -; For more info: https://en.wikipedia.org/wiki/OpenID -; -; Current implementation supports OpenID-2.0 +;; +;; Global secret key that will be used - if blank will be regenerated. +SECRET_KEY = +;; +;; Secret used to validate communication within Gitea binary. +INTERNAL_TOKEN= +;; +;; Instead of defining internal token in the configuration, this configuration option can be used to give Gitea a path to a file that contains the internal token (example value: file:/etc/gitea/internal_token) +;INTERNAL_TOKEN_URI = ;e.g. /etc/gitea/internal_token +;; +;; How long to remember that a user is logged in before requiring relogin (in days) +;LOGIN_REMEMBER_DAYS = 7 +;; +;; Name of the cookie used to store the current username. +;COOKIE_USERNAME = gitea_awesome +;; +;; Name of cookie used to store authentication information. +;COOKIE_REMEMBER_NAME = gitea_incredible +;; +;; Reverse proxy authentication header name of user name and email +;REVERSE_PROXY_AUTHENTICATION_USER = X-WEBAUTH-USER +;REVERSE_PROXY_AUTHENTICATION_EMAIL = X-WEBAUTH-EMAIL +;; +;; Interpret X-Forwarded-For header or the X-Real-IP header and set this as the remote IP for the request +;REVERSE_PROXY_LIMIT = 1 +;; +;; List of IP addresses and networks separated by comma of trusted proxy servers. Use `*` to trust all. +;REVERSE_PROXY_TRUSTED_PROXIES = 127.0.0.0/8,::1/128 +;; +;; The minimum password length for new Users +;MIN_PASSWORD_LENGTH = 6 +;; +;; Set to true to allow users to import local server paths +;IMPORT_LOCAL_PATHS = false +;; +;; Set to false to allow users with git hook privileges to create custom git hooks. +;; Custom git hooks can be used to perform arbitrary code execution on the host operating system. +;; This enables the users to access and modify this config file and the Gitea database and interrupt the Gitea service. +;; By modifying the Gitea database, users can gain Gitea administrator privileges. +;; It also enables them to access other resources available to the user on the operating system that is running the Gitea instance and perform arbitrary actions in the name of the Gitea OS user. +;; WARNING: This maybe harmful to you website or your operating system. +;DISABLE_GIT_HOOKS = true +;; +;; Set to true to disable webhooks feature. +;DISABLE_WEBHOOKS = false +;; +;; Set to false to allow pushes to gitea repositories despite having an incomplete environment - NOT RECOMMENDED +;ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET = true +;; +;;Comma separated list of character classes required to pass minimum complexity. 
+;;If left empty or no valid values are specified, the default is off (no checking) +;;Classes include "lower,upper,digit,spec" +;PASSWORD_COMPLEXITY = off +;; +;; Password Hash algorithm, either "argon2", "pbkdf2", "scrypt" or "bcrypt" +;PASSWORD_HASH_ALGO = pbkdf2 +;; +;; Set false to allow JavaScript to read CSRF cookie +;CSRF_COOKIE_HTTP_ONLY = true +;; +;; Validate against https://haveibeenpwned.com/Passwords to see if a password has been exposed +;PASSWORD_CHECK_PWN = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +[oauth2] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Enables OAuth2 provider +ENABLE = true +;; +;; Algorithm used to sign OAuth2 tokens. Valid values: HS256, HS384, HS512, RS256, RS384, RS512, ES256, ES384, ES512 +;JWT_SIGNING_ALGORITHM = RS256 +;; +;; Private key file path used to sign OAuth2 tokens. The path is relative to APP_DATA_PATH. +;; This setting is only needed if JWT_SIGNING_ALGORITHM is set to RS256, RS384, RS512, ES256, ES384 or ES512. +;; The file must contain a RSA or ECDSA private key in the PKCS8 format. If no key exists a 4096 bit key will be created for you. +;JWT_SIGNING_PRIVATE_KEY_FILE = jwt/private.pem +;; +;; OAuth2 authentication secret for access and refresh tokens, change this yourself to a unique string. CLI generate option is helpful in this case. https://docs.gitea.io/en-us/command-line/#generate +;; This setting is only needed if JWT_SIGNING_ALGORITHM is set to HS256, HS384 or HS512. +;JWT_SECRET = +;; +;; Lifetime of an OAuth2 access token in seconds +;ACCESS_TOKEN_EXPIRATION_TIME = 3600 +;; +;; Lifetime of an OAuth2 refresh token in hours +;REFRESH_TOKEN_EXPIRATION_TIME = 730 +;; +;; Check if refresh token got already used +;INVALIDATE_REFRESH_TOKENS = false +;; +;; Maximum length of oauth2 token/cookie stored on server +;MAX_TOKEN_LENGTH = 32767 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +[U2F] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; NOTE: THE DEFAULT VALUES HERE WILL NEED TO BE CHANGED +;; Two Factor authentication with security keys +;; https://developers.yubico.com/U2F/App_ID.html +APP_ID = ; e.g. http://localhost:3000/ +;; Comma separated list of trusted facets +TRUSTED_FACETS = ; e.g. http://localhost:3000/ + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +[log] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Root path for the log files - defaults to %(GITEA_WORK_DIR)/log +;ROOT_PATH = +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Main Logger +;; +;; Either "console", "file", "conn", "smtp" or "database", default is "console" +;; Use comma to separate multiple modes, e.g. 
"console, file" +MODE = console +;; +;; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Trace" +LEVEL = Info +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Router Logger +;; +;; Switch off the router log +;DISABLE_ROUTER_LOG= ; false +;; +;; Set the log "modes" for the router log (if file is set the log file will default to router.log) +ROUTER = console +;; +;; The level at which the router logs +;ROUTER_LOG_LEVEL = Info +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Access Logger (Creates log in NCSA common log format) +;; +;ENABLE_ACCESS_LOG = false +;; Set the log "modes" for the access log (if file is set the log file will default to access.log) +;ACCESS = file +;; +;; Sets the template used to create the access log. +;ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}" +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; SSH log (Creates log from ssh git request) +;; +;ENABLE_SSH_LOG = false +;; +;; Other Settings +;; +;; Print Stacktraces with logs. (Rarely helpful.) Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "None" +;STACKTRACE_LEVEL = None +;; +;; Buffer length of the channel, keep it as it is if you don't know what it is. +;BUFFER_LEN = 10000 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Creating specific log configuration +;; +;; You can set specific configuration for individual modes and subloggers +;; +;; Configuration available to all log modes/subloggers +;LEVEL= +;FLAGS = stdflags +;EXPRESSION = +;PREFIX = +;COLORIZE = false +;; +;; For "console" mode only +;STDERR = false +;; +;; For "file" mode only +;LEVEL = +;; Set the file_name for the logger. If this is a relative path this +;; will be relative to ROOT_PATH +;FILE_NAME = +;; This enables automated log rotate(switch of following options), default is true +;LOG_ROTATE = true +;; Max size shift of a single file, default is 28 means 1 << 28, 256MB +;MAX_SIZE_SHIFT = 28 +;; Segment log daily, default is true +;DAILY_ROTATE = true +;; delete the log file after n days, default is 7 +;MAX_DAYS = 7 +;; compress logs with gzip +;COMPRESS = true +;; compression level see godoc for compress/gzip +;COMPRESSION_LEVEL = -1 ; -; Tested to work providers at the time of writing: -; - Any GNUSocial node (your.hostname.tld/username) -; - Any SimpleID provider (http://simpleid.koinic.net) -; - http://openid.org.cn/ -; - openid.stackexchange.com -; - login.launchpad.net -; - .livejournal.com +;; For "conn" mode only +;LEVEL = +;; Reconnect host for every single message, default is false +;RECONNECT_ON_MSG = false +;; Try to reconnect when connection is lost, default is false +;RECONNECT = false +;; Either "tcp", "unix" or "udp", default is "tcp" +;PROTOCOL = tcp +;; Host address +;ADDR = ; -; Whether to allow signin in via OpenID -ENABLE_OPENID_SIGNIN = true -; Whether to allow registering via OpenID -; Do not include to rely on rhw DISABLE_REGISTRATION setting -;ENABLE_OPENID_SIGNUP = true -; Allowed URI patterns (POSIX regexp). -; Space separated. -; Only these would be allowed if non-blank. -; Example value: trusted.domain.org trusted.domain.net -WHITELISTED_URIS = -; Forbidden URI patterns (POSIX regexp). -; Space separated. -; Only used if WHITELISTED_URIS is blank. 
-; Example value: loadaverage.org/badguy stackexchange.com/.*spammer -BLACKLISTED_URIS = - -[oauth2_client] -; Whether a new auto registered oauth2 user needs to confirm their email. -; Do not include to use the REGISTER_EMAIL_CONFIRM setting from the `[service]` section. -REGISTER_EMAIL_CONFIRM = -; Scopes for the openid connect oauth2 provider (separated by space, the openid scope is implicitly added). -; Typical values are profile and email. -; For more information about the possible values see https://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims -OPENID_CONNECT_SCOPES = -; Automatically create user accounts for new oauth2 users. -ENABLE_AUTO_REGISTRATION = false -; The source of the username for new oauth2 accounts: -; userid = use the userid / sub attribute -; nickname = use the nickname attribute -; email = use the username part of the email attribute -USERNAME = nickname -; Update avatar if available from oauth2 provider. -; Update will be performed on each login. -UPDATE_AVATAR = false -; How to handle if an account / email already exists: -; disabled = show an error -; login = show an account linking login -; auto = link directly with the account -ACCOUNT_LINKING = disabled - +;; For "smtp" mode only +;LEVEL = +;; Name displayed in mail title, default is "Diagnostic message from server" +;SUBJECT = Diagnostic message from server +;; Mail server +;HOST = +;; Mailer user name and password +;USER = +;; Use PASSWD = `your password` for quoting if you use special characters in the password. +;PASSWD = +;; Receivers, can be one or more, e.g. 1@example.com,2@example.com +;RECEIVERS = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +[git] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; The path of git executable. If empty, Gitea searches through the PATH environment. +PATH = +;; +;; Disables highlight of added and removed changes +;DISABLE_DIFF_HIGHLIGHT = false +;; +;; Max number of lines allowed in a single file in diff view +;MAX_GIT_DIFF_LINES = 1000 +;; +;; Max number of allowed characters in a line in diff view +;MAX_GIT_DIFF_LINE_CHARACTERS = 5000 +;; +;; Max number of files shown in diff view +;MAX_GIT_DIFF_FILES = 100 +;; +;; Set the default commits range size +;COMMITS_RANGE_SIZE = 50 +;; +;; Set the default branches range size +;BRANCHES_RANGE_SIZE = 20 +;; +;; Arguments for command 'git gc', e.g. "--aggressive --auto" +;; see more on http://git-scm.com/docs/git-gc/ +;GC_ARGS = +;; +;; If use git wire protocol version 2 when git version >= 2.18, default is true, set to false when you always want git wire protocol version 1 +;ENABLE_AUTO_GIT_WIRE_PROTOCOL = true +;; +;; Respond to pushes to a non-default branch with a URL for creating a Pull Request (if the repository has them enabled) +;PULL_REQUEST_PUSH_MESSAGE = true +;; +;; (Go-Git only) Don't cache objects greater than this in memory. (Set to 0 to disable.) +;LARGE_OBJECT_THRESHOLD = 1048576 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; [service] -; Time limit to confirm account/email registration -ACTIVE_CODE_LIVE_MINUTES = 180 -; Time limit to perform the reset of a forgotten password -RESET_PASSWD_CODE_LIVE_MINUTES = 180 -; Whether a new user needs to confirm their email when registering. -REGISTER_EMAIL_CONFIRM = false -; Whether a new user needs to be confirmed manually after registration. 
(Requires `REGISTER_EMAIL_CONFIRM` to be disabled.) -REGISTER_MANUAL_CONFIRM = false -; List of domain names that are allowed to be used to register on a Gitea instance -; gitea.io,example.com -EMAIL_DOMAIN_WHITELIST = -; Comma-separated list of domain names that are not allowed to be used to register on a Gitea instance -EMAIL_DOMAIN_BLOCKLIST = -; Disallow registration, only allow admins to create accounts. -DISABLE_REGISTRATION = false -; Allow registration only using third-party services, it works only when DISABLE_REGISTRATION is false -ALLOW_ONLY_EXTERNAL_REGISTRATION = false -; User must sign in to view anything. -REQUIRE_SIGNIN_VIEW = false -; Mail notification -ENABLE_NOTIFY_MAIL = false -; This setting enables gitea to be signed in with HTTP BASIC Authentication using the user's password -; If you set this to false you will not be able to access the tokens endpoints on the API with your password -; Please note that setting this to false will not disable OAuth Basic or Basic authentication using a token -ENABLE_BASIC_AUTHENTICATION = true -; More detail: https://github.com/gogits/gogs/issues/165 -ENABLE_REVERSE_PROXY_AUTHENTICATION = false -ENABLE_REVERSE_PROXY_AUTO_REGISTRATION = false -ENABLE_REVERSE_PROXY_EMAIL = false -; Enable captcha validation for registration -ENABLE_CAPTCHA = false -; Type of captcha you want to use. Options: image, recaptcha, hcaptcha -CAPTCHA_TYPE = image -; Enable recaptcha to use Google's recaptcha service -; Go to https://www.google.com/recaptcha/admin to sign up for a key -RECAPTCHA_SECRET = -RECAPTCHA_SITEKEY = -; For hCaptcha, create an account at https://accounts.hcaptcha.com/login to get your keys -HCAPTCHA_SECRET = -HCAPTCHA_SITEKEY = -; Change this to use recaptcha.net or other recaptcha service -RECAPTCHA_URL = https://www.google.com/recaptcha/ -; Default value for KeepEmailPrivate -; Each new user will get the value of this setting copied into their profile -DEFAULT_KEEP_EMAIL_PRIVATE = false -; Default value for AllowCreateOrganization -; Every new user will have rights set to create organizations depending on this setting -DEFAULT_ALLOW_CREATE_ORGANIZATION = true -; Either "public", "limited" or "private", default is "public" -; Limited is for signed user only -; Private is only for member of the organization -; Public is for everyone -DEFAULT_ORG_VISIBILITY = public -; Default value for DefaultOrgMemberVisible -; True will make the membership of the users visible when added to the organisation -DEFAULT_ORG_MEMBER_VISIBLE = false -; Default value for EnableDependencies -; Repositories will use dependencies by default depending on this setting -DEFAULT_ENABLE_DEPENDENCIES = true -; Dependencies can be added from any repository where the user is granted access or only from the current repository depending on this setting. -ALLOW_CROSS_REPOSITORY_DEPENDENCIES = true -; Enable heatmap on users profiles. -ENABLE_USER_HEATMAP = true -; Enable Timetracking -ENABLE_TIMETRACKING = true -; Default value for EnableTimetracking -; Repositories will use timetracking by default depending on this setting -DEFAULT_ENABLE_TIMETRACKING = true -; Default value for AllowOnlyContributorsToTrackTime -; Only users with write permissions can track time if this is true -DEFAULT_ALLOW_ONLY_CONTRIBUTORS_TO_TRACK_TIME = true -; Value for the domain part of the user's email address in the git log if user -; has set KeepEmailPrivate to true. The user's email will be replaced with a -; concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS. 
Default -; value is "noreply." + DOMAIN, where DOMAIN resolves to the value from server.DOMAIN -; Note: do not use the notation below -NO_REPLY_ADDRESS = noreply. -; Show Registration button -SHOW_REGISTRATION_BUTTON = true -; Show milestones dashboard page - a view of all the user's milestones -SHOW_MILESTONES_DASHBOARD_PAGE = true -; Default value for AutoWatchNewRepos -; When adding a repo to a team or creating a new repo all team members will watch the -; repo automatically if enabled -AUTO_WATCH_NEW_REPOS = true -; Default value for AutoWatchOnChanges -; Make the user watch a repository When they commit for the first time -AUTO_WATCH_ON_CHANGES = false -; Minimum amount of time a user must exist before comments are kept when the user is deleted. -USER_DELETE_WITH_COMMENTS_MAX_TIME = 0 - -[webhook] -; Hook task queue length, increase if webhook shooting starts hanging -QUEUE_LENGTH = 1000 -; Deliver timeout in seconds -DELIVER_TIMEOUT = 5 -; Allow insecure certification -SKIP_TLS_VERIFY = false -; Number of history information in each page -PAGING_NUM = 10 -; Proxy server URL, support http://, https//, socks://, blank will follow environment http_proxy/https_proxy -PROXY_URL = -; Comma separated list of host names requiring proxy. Glob patterns (*) are accepted; use ** to match all hosts. -PROXY_HOSTS = - -[mailer] -ENABLED = false -; Buffer length of channel, keep it as it is if you don't know what it is. -SEND_BUFFER_LEN = 100 -; Prefix displayed before subject in mail -SUBJECT_PREFIX = -; Mail server -; Gmail: smtp.gmail.com:587 -; QQ: smtp.qq.com:465 -; Using STARTTLS on port 587 is recommended per RFC 6409. -; Note, if the port ends with "465", SMTPS will be used. -HOST = -; Disable HELO operation when hostnames are different. -DISABLE_HELO = -; Custom hostname for HELO operation, if no value is provided, one is retrieved from system. -HELO_HOSTNAME = -; Whether or not to skip verification of certificates; `true` to disable verification. This option is unsafe. Consider adding the certificate to the system trust store instead. -SKIP_VERIFY = false -; Use client certificate -USE_CERTIFICATE = false -CERT_FILE = custom/mailer/cert.pem -KEY_FILE = custom/mailer/key.pem -; Should SMTP connect with TLS, (if port ends with 465 TLS will always be used.) -; If this is false but STARTTLS is supported the connection will be upgraded to TLS opportunistically. -IS_TLS_ENABLED = false -; Mail from address, RFC 5322. This can be just an email address, or the `"Name" ` format -FROM = -; Mailer user name and password -; Please Note: Authentication is only supported when the SMTP server communication is encrypted with TLS (this can be via STARTTLS) or `HOST=localhost`. -USER = -; Use PASSWD = `your password` for quoting if you use special characters in the password. 
-PASSWD = -; Send mails as plain text -SEND_AS_PLAIN_TEXT = false -; Set Mailer Type (either SMTP, sendmail or dummy to just send to the log) -MAILER_TYPE = smtp -; Specify an alternative sendmail binary -SENDMAIL_PATH = sendmail -; Specify any extra sendmail arguments -SENDMAIL_ARGS = -; Timeout for Sendmail -SENDMAIL_TIMEOUT = 5m - -[cache] -; if the cache enabled -ENABLED = true -; Either "memory", "redis", or "memcache", default is "memory" -ADAPTER = memory -; For "memory" only, GC interval in seconds, default is 60 -INTERVAL = 60 -; For "redis" and "memcache", connection host address -; redis: network=tcp,addr=:6379,password=macaron,db=0,pool_size=100,idle_timeout=180 -; memcache: `127.0.0.1:11211` -HOST = -; Time to keep items in cache if not used, default is 16 hours. -; Setting it to 0 disables caching -ITEM_TTL = 16h - -; Last commit cache -[cache.last_commit] -; if the cache enabled -ENABLED = true -; Time to keep items in cache if not used, default is 8760 hours. -; Setting it to 0 disables caching -ITEM_TTL = 8760h -; Only enable the cache when repository's commits count great than -COMMITS_COUNT = 1000 - -[session] -; Either "memory", "file", or "redis", default is "memory" -PROVIDER = memory -; Provider config options -; memory: doesn't have any config yet -; file: session file path, e.g. `data/sessions` -; redis: network=tcp,addr=:6379,password=macaron,db=0,pool_size=100,idle_timeout=180 -; mysql: go-sql-driver/mysql dsn config string, e.g. `root:password@/session_table` -PROVIDER_CONFIG = data/sessions -; Session cookie name -COOKIE_NAME = i_like_gitea -; If you use session in https only, default is false -COOKIE_SECURE = false -; Session GC time interval in seconds, default is 86400 (1 day) -GC_INTERVAL_TIME = 86400 -; Session life time in seconds, default is 86400 (1 day) -SESSION_LIFE_TIME = 86400 -; SameSite settings. Either "none", "lax", or "strict" -SAME_SITE=lax - -[picture] -AVATAR_UPLOAD_PATH = data/avatars -REPOSITORY_AVATAR_UPLOAD_PATH = data/repo-avatars -; How Gitea deals with missing repository avatars -; none = no avatar will be displayed; random = random avatar will be displayed; image = default image will be used -REPOSITORY_AVATAR_FALLBACK = none -REPOSITORY_AVATAR_FALLBACK_IMAGE = /img/repo_default.png -; Max Width and Height of uploaded avatars. -; This is to limit the amount of RAM used when resizing the image. -AVATAR_MAX_WIDTH = 4096 -AVATAR_MAX_HEIGHT = 3072 -; Maximum allowed file size for uploaded avatars. -; This is to limit the amount of RAM used when resizing the image. -AVATAR_MAX_FILE_SIZE = 1048576 -; Chinese users can choose "duoshuo" -; or a custom avatar source, like: http://cn.gravatar.com/avatar/ -GRAVATAR_SOURCE = gravatar -; This value will always be true in offline mode. -DISABLE_GRAVATAR = false -; Federated avatar lookup uses DNS to discover avatar associated -; with emails, see https://www.libravatar.org -; This value will always be false in offline mode or when Gravatar is disabled. -ENABLE_FEDERATED_AVATAR = false - -[attachment] -; Whether issue and pull request attachments are enabled. Defaults to `true` -ENABLED = true -; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. -ALLOWED_TYPES = .docx,.gif,.gz,.jpeg,.jpg,.log,.pdf,.png,.pptx,.txt,.xlsx,.zip -; Max size of each file. Defaults to 4MB -MAX_SIZE = 4 -; Max number of files per upload. 
Defaults to 5 -MAX_FILES = 5 -; Storage type for attachments, `local` for local disk or `minio` for s3 compatible -; object storage service, default is `local`. -STORAGE_TYPE = local -; Allows the storage driver to redirect to authenticated URLs to serve files directly -; Currently, only `minio` is supported. -SERVE_DIRECT = false -; Path for attachments. Defaults to `data/attachments` only available when STORAGE_TYPE is `local` -PATH = data/attachments -; Minio endpoint to connect only available when STORAGE_TYPE is `minio` -MINIO_ENDPOINT = localhost:9000 -; Minio accessKeyID to connect only available when STORAGE_TYPE is `minio` -MINIO_ACCESS_KEY_ID = -; Minio secretAccessKey to connect only available when STORAGE_TYPE is `minio` -MINIO_SECRET_ACCESS_KEY = -; Minio bucket to store the attachments only available when STORAGE_TYPE is `minio` -MINIO_BUCKET = gitea -; Minio location to create bucket only available when STORAGE_TYPE is `minio` -MINIO_LOCATION = us-east-1 -; Minio base path on the bucket only available when STORAGE_TYPE is `minio` -MINIO_BASE_PATH = attachments/ -; Minio enabled ssl only available when STORAGE_TYPE is `minio` -MINIO_USE_SSL = false - -[time] -; Specifies the format for fully outputted dates. Defaults to RFC1123 -; Special supported values are ANSIC, UnixDate, RubyDate, RFC822, RFC822Z, RFC850, RFC1123, RFC1123Z, RFC3339, RFC3339Nano, Kitchen, Stamp, StampMilli, StampMicro and StampNano -; For more information about the format see http://golang.org/pkg/time/#pkg-constants -FORMAT = -; Location the UI time display i.e. Asia/Shanghai -; Empty means server's location setting -DEFAULT_UI_LOCATION = +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Time limit to confirm account/email registration +;ACTIVE_CODE_LIVE_MINUTES = 180 +;; +;; Time limit to perform the reset of a forgotten password +;RESET_PASSWD_CODE_LIVE_MINUTES = 180 +;; +;; Whether a new user needs to confirm their email when registering. +;REGISTER_EMAIL_CONFIRM = false +;; +;; Whether a new user needs to be confirmed manually after registration. (Requires `REGISTER_EMAIL_CONFIRM` to be disabled.) +;REGISTER_MANUAL_CONFIRM = false +;; +;; List of domain names that are allowed to be used to register on a Gitea instance +;; gitea.io,example.com +;EMAIL_DOMAIN_WHITELIST = +;; +;; Comma-separated list of domain names that are not allowed to be used to register on a Gitea instance +;EMAIL_DOMAIN_BLOCKLIST = +;; +;; Disallow registration, only allow admins to create accounts. +;DISABLE_REGISTRATION = false +;; +;; Allow registration only using gitea itself, it works only when DISABLE_REGISTRATION is false +;ALLOW_ONLY_INTERNAL_REGISTRATION = false +;; +;; Allow registration only using third-party services, it works only when DISABLE_REGISTRATION is false +;ALLOW_ONLY_EXTERNAL_REGISTRATION = false +;; +;; User must sign in to view anything. 
+;REQUIRE_SIGNIN_VIEW = false +;; +;; Mail notification +;ENABLE_NOTIFY_MAIL = false +;; +;; This setting enables gitea to be signed in with HTTP BASIC Authentication using the user's password +;; If you set this to false you will not be able to access the tokens endpoints on the API with your password +;; Please note that setting this to false will not disable OAuth Basic or Basic authentication using a token +;ENABLE_BASIC_AUTHENTICATION = true +;; +;; More detail: https://github.com/gogits/gogs/issues/165 +;ENABLE_REVERSE_PROXY_AUTHENTICATION = false +;ENABLE_REVERSE_PROXY_AUTO_REGISTRATION = false +;ENABLE_REVERSE_PROXY_EMAIL = false +;; +;; Enable captcha validation for registration +;ENABLE_CAPTCHA = false +;; +;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha +;CAPTCHA_TYPE = image +;; +;; Enable recaptcha to use Google's recaptcha service +;; Go to https://www.google.com/recaptcha/admin to sign up for a key +;RECAPTCHA_SECRET = +;RECAPTCHA_SITEKEY = +;; +;; For hCaptcha, create an account at https://accounts.hcaptcha.com/login to get your keys +;HCAPTCHA_SECRET = +;HCAPTCHA_SITEKEY = +;; +;; Change this to use recaptcha.net or other recaptcha service +;RECAPTCHA_URL = https://www.google.com/recaptcha/ +;; +;; Default value for KeepEmailPrivate +;; Each new user will get the value of this setting copied into their profile +;DEFAULT_KEEP_EMAIL_PRIVATE = false +;; +;; Default value for AllowCreateOrganization +;; Every new user will have rights set to create organizations depending on this setting +;DEFAULT_ALLOW_CREATE_ORGANIZATION = true +;; +;; Either "public", "limited" or "private", default is "public" +;; Limited is for users visible only to signed users +;; Private is for users visible only to members of their organizations +;; Public is for users visible for everyone +;DEFAULT_USER_VISIBILITY = public +;; +;; Set which visibility modes a user can have +;ALLOWED_USER_VISIBILITY_MODES = public,limited,private +;; +;; Either "public", "limited" or "private", default is "public" +;; Limited is for organizations visible only to signed users +;; Private is for organizations visible only to members of the organization +;; Public is for organizations visible to everyone +;DEFAULT_ORG_VISIBILITY = public +;; +;; Default value for DefaultOrgMemberVisible +;; True will make the membership of the users visible when added to the organisation +;DEFAULT_ORG_MEMBER_VISIBLE = false +;; +;; Default value for EnableDependencies +;; Repositories will use dependencies by default depending on this setting +;DEFAULT_ENABLE_DEPENDENCIES = true +;; +;; Dependencies can be added from any repository where the user is granted access or only from the current repository depending on this setting. +;ALLOW_CROSS_REPOSITORY_DEPENDENCIES = true +;; +;; Enable heatmap on users profiles. +;ENABLE_USER_HEATMAP = true +;; +;; Enable Timetracking +;ENABLE_TIMETRACKING = true +;; +;; Default value for EnableTimetracking +;; Repositories will use timetracking by default depending on this setting +;DEFAULT_ENABLE_TIMETRACKING = true +;; +;; Default value for AllowOnlyContributorsToTrackTime +;; Only users with write permissions can track time if this is true +;DEFAULT_ALLOW_ONLY_CONTRIBUTORS_TO_TRACK_TIME = true +;; +;; Value for the domain part of the user's email address in the git log if user +;; has set KeepEmailPrivate to true. The user's email will be replaced with a +;; concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS. Default +;; value is "noreply." 
+ DOMAIN, where DOMAIN resolves to the value from server.DOMAIN +;; Note: do not use the notation below +;NO_REPLY_ADDRESS = ; noreply. +;; +;; Show Registration button +;SHOW_REGISTRATION_BUTTON = true +;; +;; Show milestones dashboard page - a view of all the user's milestones +;SHOW_MILESTONES_DASHBOARD_PAGE = true +;; +;; Default value for AutoWatchNewRepos +;; When adding a repo to a team or creating a new repo all team members will watch the +;; repo automatically if enabled +;AUTO_WATCH_NEW_REPOS = true +;; +;; Default value for AutoWatchOnChanges +;; Make the user watch a repository When they commit for the first time +;AUTO_WATCH_ON_CHANGES = false +;; +;; Minimum amount of time a user must exist before comments are kept when the user is deleted. +;USER_DELETE_WITH_COMMENTS_MAX_TIME = 0 +;; Valid site url schemes for user profiles +;VALID_SITE_URL_SCHEMES=http,https + + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Other Settings +;; +;; Uncomment the [section.header] if you wish to +;; set the below settings. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Root path for storing all repository data. It must be an absolute path. By default, it is stored in a sub-directory of `APP_DATA_PATH`. +;ROOT = +;; +;; The script type this server supports. Usually this is `bash`, but some users report that only `sh` is available. +;SCRIPT_TYPE = bash +;; +;; DETECTED_CHARSETS_ORDER tie-break order for detected charsets. +;; If the charsets have equal confidence, tie-breaking will be done by order in this list +;; with charsets earlier in the list chosen in preference to those later. +;; Adding "defaults" will place the unused charsets at that position. +;DETECTED_CHARSETS_ORDER = UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, UTF-32LE, ISO-8859, windows-1252, ISO-8859, windows-1250, ISO-8859, ISO-8859, ISO-8859, windows-1253, ISO-8859, windows-1255, ISO-8859, windows-1251, windows-1256, KOI8-R, ISO-8859, windows-1254, Shift_JIS, GB18030, EUC-JP, EUC-KR, Big5, ISO-2022, ISO-2022, ISO-2022, IBM424_rtl, IBM424_ltr, IBM420_rtl, IBM420_ltr +;; +;; Default ANSI charset to override non-UTF-8 charsets to +;ANSI_CHARSET = +;; +;; Force every new repository to be private +;FORCE_PRIVATE = false +;; +;; Default privacy setting when creating a new repository, allowed values: last, private, public. Default is last which means the last setting used. +;DEFAULT_PRIVATE = last +;; +;; Default private when using push-to-create +;DEFAULT_PUSH_CREATE_PRIVATE = true +;; +;; Global limit of repositories per user, applied at creation time. -1 means no limit +;MAX_CREATION_LIMIT = -1 +;; +;; Mirror sync queue length, increase if mirror syncing starts hanging +;MIRROR_QUEUE_LENGTH = 1000 +;; +;; Patch test queue length, increase if pull request patch testing starts hanging +;PULL_REQUEST_QUEUE_LENGTH = 1000 +;; +;; Preferred Licenses to place at the top of the List +;; The name here must match the filename in conf/license or custom/conf/license +;PREFERRED_LICENSES = Apache License 2.0,MIT License +;; +;; Disable the ability to interact with repositories using the HTTP protocol +;;DISABLE_HTTP_GIT = false +;; +;; Value for Access-Control-Allow-Origin header, default is not to present +;; WARNING: This may be harmful to your website if you do not give it a right value. 
+;ACCESS_CONTROL_ALLOW_ORIGIN = +;; +;; Force ssh:// clone url instead of scp-style uri when default SSH port is used +;USE_COMPAT_SSH_URI = false +;; +;; Close issues as long as a commit on any branch marks it as fixed +;; Comma separated list of globally disabled repo units. Allowed values: repo.issues, repo.ext_issues, repo.pulls, repo.wiki, repo.ext_wiki +;DISABLED_REPO_UNITS = +;; +;; Comma separated list of default repo units. Allowed values: repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects. +;; Note: Code and Releases can currently not be deactivated. If you specify default repo units you should still list them for future compatibility. +;; External wiki and issue tracker can't be enabled by default as it requires additional settings. +;; Disabled repo units will not be added to new repositories regardless if it is in the default list. +;DEFAULT_REPO_UNITS = repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects +;; +;; Prefix archive files by placing them in a directory named after the repository +;PREFIX_ARCHIVE_FILES = true +;; +;; Disable the creation of new mirrors. Pre-existing mirrors remain valid. +;DISABLE_MIRRORS = false +;; +;; Disable migrating feature. +;DISABLE_MIGRATIONS = false +;; +;; Disable stars feature. +;DISABLE_STARS = false +;; +;; The default branch name of new repositories +;DEFAULT_BRANCH = master +;; +;; Allow adoption of unadopted repositories +;ALLOW_ADOPTION_OF_UNADOPTED_REPOSITORIES = false +;; +;; Allow deletion of unadopted repositories +;ALLOW_DELETION_OF_UNADOPTED_REPOSITORIES = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.editor] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; List of file extensions for which lines should be wrapped in the Monaco editor +;; Separate extensions with a comma. To line wrap files without an extension, just put a comma +;LINE_WRAP_EXTENSIONS = .txt,.md,.markdown,.mdown,.mkd, +;; +;; Valid file modes that have a preview API associated with them, such as api/v1/markdown +;; Separate the values by commas. The preview tab in edit mode won't be displayed if the file extension doesn't match +;PREVIEWABLE_FILE_MODES = markdown + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.local] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Path for local repository copy. Defaults to `tmp/local-repo` +;LOCAL_COPY_PATH = tmp/local-repo + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.upload] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Whether repository file uploads are enabled. Defaults to `true` +;ENABLED = true +;; +;; Path for uploads. Defaults to `data/tmp/uploads` (tmp gets deleted on gitea restart) +;TEMP_PATH = data/tmp/uploads +;; +;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. +;ALLOWED_TYPES = +;; +;; Max size of each file in megabytes. Defaults to 3MB +;FILE_MAX_SIZE = 3 +;; +;; Max number of files per upload. 
Defaults to 5 +;MAX_FILES = 5 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.pull-request] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; List of prefixes used in Pull Request title to mark them as Work In Progress +;WORK_IN_PROGRESS_PREFIXES = WIP:,[WIP] +;; +;; List of keywords used in Pull Request comments to automatically close a related issue +;CLOSE_KEYWORDS = close,closes,closed,fix,fixes,fixed,resolve,resolves,resolved +;; +;; List of keywords used in Pull Request comments to automatically reopen a related issue +;REOPEN_KEYWORDS = reopen,reopens,reopened +;; +;; In the default merge message for squash commits include at most this many commits +;DEFAULT_MERGE_MESSAGE_COMMITS_LIMIT = 50 +;; +;; In the default merge message for squash commits limit the size of the commit messages to this +;DEFAULT_MERGE_MESSAGE_SIZE = 5120 +;; +;; In the default merge message for squash commits walk all commits to include all authors in the Co-authored-by otherwise just use those in the limited list +;DEFAULT_MERGE_MESSAGE_ALL_AUTHORS = false +;; +;; In default merge messages limit the number of approvers listed as Reviewed-by: to this many +;DEFAULT_MERGE_MESSAGE_MAX_APPROVERS = 10 +;; +;; In default merge messages only include approvers who are official +;DEFAULT_MERGE_MESSAGE_OFFICIAL_APPROVERS_ONLY = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.issue] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; List of reasons why a Pull Request or Issue can be locked +;LOCK_REASONS = Too heated,Off-topic,Resolved,Spam + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.release] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. +;ALLOWED_TYPES = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.signing] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; GPG key to use to sign commits, Defaults to the default - that is the value of git config --get user.signingkey +;; run in the context of the RUN_USER +;; Switch to none to stop signing completely +;SIGNING_KEY = default +;; +;; If a SIGNING_KEY ID is provided and is not set to default, use the provided Name and Email address as the signer. +;; These should match a publicized name and email address for the key. (When SIGNING_KEY is default these are set to +;; the results of git config --get user.name and git config --get user.email respectively and can only be overridden +;; by setting the SIGNING_KEY ID to the correct ID.) +;SIGNING_NAME = +;SIGNING_EMAIL = +;; +;; Sets the default trust model for repositories. 
Options are: collaborator, committer, collaboratorcommitter +;DEFAULT_TRUST_MODEL = collaborator +;; +;; Determines when gitea should sign the initial commit when creating a repository +;; Either: +;; - never +;; - pubkey: only sign if the user has a pubkey +;; - twofa: only sign if the user has logged in with twofa +;; - always +;; options other than none and always can be combined as comma separated list +;INITIAL_COMMIT = always +;; +;; Determines when to sign for CRUD actions +;; - as above +;; - parentsigned: requires that the parent commit is signed. +;CRUD_ACTIONS = pubkey, twofa, parentsigned +;; Determines when to sign Wiki commits +;; - as above +;WIKI = never +;; +;; Determines when to sign on merges +;; - basesigned: require that the parent of commit on the base repo is signed. +;; - commitssigned: require that all the commits in the head branch are signed. +;; - approved: only sign when merging an approved pr to a protected branch +;MERGES = pubkey, twofa, basesigned, commitssigned + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[repository.mimetype_mapping] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Custom MIME type mapping for downloadable files +;.apk=application/vnd.android.package-archive + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[project] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Default templates for project boards +;PROJECT_BOARD_BASIC_KANBAN_TYPE = To Do, In Progress, Done +;PROJECT_BOARD_BUG_TRIAGE_TYPE = Needs Triage, High Priority, Low Priority, Closed + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cors] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; More information about CORS can be found here: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#The_HTTP_response_headers +;; enable cors headers (disabled by default) +;ENABLED = false +;; +;; scheme of allowed requests +;SCHEME = http +;; +;; list of requesting domains that are allowed +;ALLOW_DOMAIN = * +;; +;; allow subdomains of headers listed above to request +;ALLOW_SUBDOMAIN = false +;; +;; list of methods allowed to request +;METHODS = GET,HEAD,POST,PUT,PATCH,DELETE,OPTIONS +;; +;; max time to cache response +;MAX_AGE = 10m +;; +;; allow request with credentials +;ALLOW_CREDENTIALS = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Number of repositories that are displayed on one explore page +;EXPLORE_PAGING_NUM = 20 +;; +;; Number of issues that are displayed on one page +;ISSUE_PAGING_NUM = 10 +;; +;; Number of maximum commits displayed in one activity feed +;FEED_MAX_COMMIT_NUM = 5 +;; +;; Number of items that are displayed in home feed +;FEED_PAGING_NUM = 20 +;; +;; Number of maximum commits displayed in commit graph. 
+;GRAPH_MAX_COMMIT_NUM = 100 +;; +;; Number of line of codes shown for a code comment +;CODE_COMMENT_LINES = 4 +;; +;; Value of `theme-color` meta tag, used by Android >= 5.0 +;; An invalid color like "none" or "disable" will have the default style +;; More info: https://developers.google.com/web/updates/2014/11/Support-for-theme-color-in-Chrome-39-for-Android +;THEME_COLOR_META_TAG = `#6cc644` +;; +;; Max size of files to be displayed (default is 8MiB) +;MAX_DISPLAY_FILE_SIZE = 8388608 +;; +;; Whether the email of the user should be shown in the Explore Users page +;SHOW_USER_EMAIL = true +;; +;; Set the default theme for the Gitea install +;DEFAULT_THEME = gitea +;; +;; All available themes. Allow users select personalized themes regardless of the value of `DEFAULT_THEME`. +;THEMES = gitea,arc-green +;; +;; All available reactions users can choose on issues/prs and comments. +;; Values can be emoji alias (:smile:) or a unicode emoji. +;; For custom reactions, add a tightly cropped square image to public/img/emoji/reaction_name.png +;REACTIONS = +1, -1, laugh, hooray, confused, heart, rocket, eyes +;; +;; Additional Emojis not defined in the utf8 standard +;; By default we support gitea (:gitea:), to add more copy them to public/img/emoji/emoji_name.png and add it to this config. +;; Dont mistake it for Reactions. +;CUSTOM_EMOJIS = gitea, codeberg, gitlab, git, github, gogs +;; +;; Whether the full name of the users should be shown where possible. If the full name isn't set, the username will be used. +;DEFAULT_SHOW_FULL_NAME = false +;; +;; Whether to search within description at repository search on explore page. +;SEARCH_REPO_DESCRIPTION = true +;; +;; Whether to enable a Service Worker to cache frontend assets +;USE_SERVICE_WORKER = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.admin] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Number of users that are displayed on one page +;USER_PAGING_NUM = 50 +;; +;; Number of repos that are displayed on one page +;REPO_PAGING_NUM = 50 +;; +;; Number of notices that are displayed on one page +;NOTICE_PAGING_NUM = 25 +;; +;; Number of organizations that are displayed on one page +;ORG_PAGING_NUM = 50 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.user] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Number of repos that are displayed on one page +;REPO_PAGING_NUM = 15 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.meta] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;AUTHOR = Gitea - Git with a cup of tea +;DESCRIPTION = Gitea (Git with a cup of tea) is a painless self-hosted Git service written in Go +;KEYWORDS = go,git,self-hosted,gitea + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.notification] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Control how often the notification endpoint is polled to update the notification +;; The timeout will increase to MAX_TIMEOUT in TIMEOUT_STEPs if the notification count is unchanged +;; Set MIN_TIMEOUT to 0 to turn off +;MIN_TIMEOUT = 10s +;MAX_TIMEOUT = 60s +;TIMEOUT_STEP = 10s +;; 
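To illustrate the notification back-off described above: polling starts at MIN_TIMEOUT and widens by TIMEOUT_STEP up to MAX_TIMEOUT while the unread count stays unchanged, so a quieter instance might use something like the following (example values only, not defaults from this change):

    [ui.notification]
    ; first poll after 30s, backing off in 30s steps to at most 5 minutes
    MIN_TIMEOUT  = 30s
    TIMEOUT_STEP = 30s
    MAX_TIMEOUT  = 5m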
+;; This setting determines how often the db is queried to get the latest notification counts. +;; If the browser client supports EventSource and SharedWorker, a SharedWorker will be used in preference to polling notification. Set to -1 to disable the EventSource +;EVENT_SOURCE_UPDATE_TIME = 10s + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.svg] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Whether to render SVG files as images. If SVG rendering is disabled, SVG files are displayed as text and cannot be embedded in markdown files as images. +;ENABLE_RENDER = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ui.csv] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Maximum allowed file size in bytes to render CSV files as table. (Set to 0 for no limit). +;MAX_FILE_SIZE = 524288 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[markdown] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Render soft line breaks as hard line breaks, which means a single newline character between +;; paragraphs will cause a line break and adding trailing whitespace to paragraphs is not +;; necessary to force a line break. +;; Render soft line breaks as hard line breaks for comments +;ENABLE_HARD_LINE_BREAK_IN_COMMENTS = true +;; +;; Render soft line breaks as hard line breaks for markdown documents +;ENABLE_HARD_LINE_BREAK_IN_DOCUMENTS = false +;; +;; Comma separated list of custom URL-Schemes that are allowed as links when rendering Markdown +;; for example git,magnet,ftp (more at https://en.wikipedia.org/wiki/List_of_URI_schemes) +;; URLs starting with http and https are always displayed, whatever is put in this entry. +;CUSTOM_URL_SCHEMES = +;; +;; List of file extensions that should be rendered/edited as Markdown +;; Separate the extensions with a comma. 
To render files without any extension as markdown, just put a comma +;FILE_EXTENSIONS = .md,.markdown,.mdown,.mkd + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[ssh.minimum_key_sizes] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Define allowed algorithms and their minimum key length (use -1 to disable a type) +;ED25519 = 256 +;ECDSA = 256 +;RSA = 2048 +;DSA = -1 ; set to 1024 to switch on + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[indexer] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Issue Indexer settings +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Issue indexer type, currently support: bleve, db or elasticsearch, default is bleve +;ISSUE_INDEXER_TYPE = bleve +;; +;; Issue indexer storage path, available when ISSUE_INDEXER_TYPE is bleve +;ISSUE_INDEXER_PATH = indexers/issues.bleve +;; +;; Issue indexer connection string, available when ISSUE_INDEXER_TYPE is elasticsearch +;ISSUE_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200 +;; +;; Issue indexer name, available when ISSUE_INDEXER_TYPE is elasticsearch +;ISSUE_INDEXER_NAME = gitea_issues +;; +;; Timeout the indexer if it takes longer than this to start. +;; Set to zero to disable timeout. +;STARTUP_TIMEOUT = 30s +;; +;; Issue indexer queue, currently support: channel, levelqueue or redis, default is levelqueue (deprecated - use [queue.issue_indexer]) +;ISSUE_INDEXER_QUEUE_TYPE = levelqueue; **DEPRECATED** use settings in `[queue.issue_indexer]`. +;; +;; When ISSUE_INDEXER_QUEUE_TYPE is levelqueue, this will be the path where the queue will be saved. +;; This can be overridden by `ISSUE_INDEXER_QUEUE_CONN_STR`. +;; default is queues/common +;ISSUE_INDEXER_QUEUE_DIR = queues/common; **DEPRECATED** use settings in `[queue.issue_indexer]`. +;; +;; When `ISSUE_INDEXER_QUEUE_TYPE` is `redis`, this will store the redis connection string. +;; When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this is a directory or additional options of +;; the form `leveldb://path/to/db?option=value&....`, and overrides `ISSUE_INDEXER_QUEUE_DIR`. +;ISSUE_INDEXER_QUEUE_CONN_STR = "addrs=127.0.0.1:6379 db=0"; **DEPRECATED** use settings in `[queue.issue_indexer]`. +;; +;; Batch queue number, default is 20 +;ISSUE_INDEXER_QUEUE_BATCH_NUMBER = 20; **DEPRECATED** use settings in `[queue.issue_indexer]`. + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Repository Indexer settings +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; repo indexer by default disabled, since it uses a lot of disk space +;REPO_INDEXER_ENABLED = false +;; +;; Code search engine type, could be `bleve` or `elasticsearch`. +;REPO_INDEXER_TYPE = bleve +;; +;; Index file used for code search. available when `REPO_INDEXER_TYPE` is bleve +;REPO_INDEXER_PATH = indexers/repos.bleve +;; +;; Code indexer connection string, available when `REPO_INDEXER_TYPE` is elasticsearch. i.e. 
http://elastic:changeme@localhost:9200 +;REPO_INDEXER_CONN_STR = +;; +;; Code indexer name, available when `REPO_INDEXER_TYPE` is elasticsearch +;REPO_INDEXER_NAME = gitea_codes +;; +;; A comma separated list of glob patterns (see https://github.com/gobwas/glob) to include +;; in the index; default is empty +;REPO_INDEXER_INCLUDE = +;; +;; A comma separated list of glob patterns to exclude from the index; ; default is empty +;REPO_INDEXER_EXCLUDE = +;; +;; +;UPDATE_BUFFER_LEN = 20; **DEPRECATED** use settings in `[queue.issue_indexer]`. +;MAX_FILE_SIZE = 1048576 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[queue] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Specific queues can be individually configured with [queue.name]. [queue] provides defaults +;; ([queue.issue_indexer] is special due to the old configuration described above) +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; General queue queue type, currently support: persistable-channel, channel, level, redis, dummy +;; default to persistable-channel +;TYPE = persistable-channel +;; +;; data-dir for storing persistable queues and level queues, individual queues will default to `queues/common` meaning the queue is shared. +;DATADIR = queues/ +;; +;; Default queue length before a channel queue will block +;LENGTH = 20 +;; +;; Batch size to send for batched queues +;BATCH_LENGTH = 20 +;; +;; Connection string for redis queues this will store the redis connection string. +;; When `TYPE` is `persistable-channel`, this provides a directory for the underlying leveldb +;; or additional options of the form `leveldb://path/to/db?option=value&....`, and will override `DATADIR`. +;CONN_STR = "addrs=127.0.0.1:6379 db=0" +;; +;; Provides the suffix of the default redis/disk queue name - specific queues can be overridden within in their [queue.name] sections. +;QUEUE_NAME = "_queue" +;; +;; Provides the suffix of the default redis/disk unique queue set name - specific queues can be overridden within in their [queue.name] sections. +;SET_NAME = "_unique" +;; +;; If the queue cannot be created at startup - level queues may need a timeout at startup - wrap the queue: +;WRAP_IF_NECESSARY = true +;; +;; Attempt to create the wrapped queue at max +;MAX_ATTEMPTS = 10 +;; +;; Timeout queue creation +;TIMEOUT = 15m30s +;; +;; Create a pool with this many workers +;WORKERS = 0 +;; +;; Dynamically scale the worker pool to at this many workers +;MAX_WORKERS = 10 +;; +;; Add boost workers when the queue blocks for BLOCK_TIMEOUT +;BLOCK_TIMEOUT = 1s +;; +;; Remove the boost workers after BOOST_TIMEOUT +;BOOST_TIMEOUT = 5m +;; +;; During a boost add BOOST_WORKERS +;BOOST_WORKERS = 1 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[admin] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Disallow regular (non-admin) users from creating organizations. +;DISABLE_REGULAR_ORG_CREATION = false +;; +;; Default configuration for email notifications for users (user configurable). 
Options: enabled, onmention, disabled +;DEFAULT_EMAIL_NOTIFICATIONS = enabled + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[openid] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; OpenID is an open, standard and decentralized authentication protocol. +;; Your identity is the address of a webpage you provide, which describes +;; how to prove you are in control of that page. +;; +;; For more info: https://en.wikipedia.org/wiki/OpenID +;; +;; Current implementation supports OpenID-2.0 +;; +;; Providers tested to work at the time of writing: +;; - Any GNUSocial node (your.hostname.tld/username) +;; - Any SimpleID provider (http://simpleid.koinic.net) +;; - http://openid.org.cn/ +;; - openid.stackexchange.com +;; - login.launchpad.net +;; - .livejournal.com +;; +;; Whether to allow signing in via OpenID +;ENABLE_OPENID_SIGNIN = true +;; +;; Whether to allow registering via OpenID +;; Do not include to rely on the DISABLE_REGISTRATION setting +;;ENABLE_OPENID_SIGNUP = true +;; +;; Allowed URI patterns (POSIX regexp). +;; Space separated. +;; Only these would be allowed if non-blank. +;; Example value: trusted.domain.org trusted.domain.net +;WHITELISTED_URIS = +;; +;; Forbidden URI patterns (POSIX regexp). +;; Space separated. +;; Only used if WHITELISTED_URIS is blank. +;; Example value: loadaverage.org/badguy stackexchange.com/.*spammer +;BLACKLISTED_URIS = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[oauth2_client] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Whether a new auto registered oauth2 user needs to confirm their email. +;; Do not include to use the REGISTER_EMAIL_CONFIRM setting from the `[service]` section. +;REGISTER_EMAIL_CONFIRM = +;; +;; Scopes for the openid connect oauth2 provider (separated by space, the openid scope is implicitly added). +;; Typical values are profile and email. +;; For more information about the possible values see https://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims +;OPENID_CONNECT_SCOPES = +;; +;; Automatically create user accounts for new oauth2 users. +;ENABLE_AUTO_REGISTRATION = false +;; +;; The source of the username for new oauth2 accounts: +;; userid = use the userid / sub attribute +;; nickname = use the nickname attribute +;; email = use the username part of the email attribute +;USERNAME = nickname +;; +;; Update avatar if available from oauth2 provider. +;; Update will be performed on each login.
+;UPDATE_AVATAR = false +;; +;; How to handle if an account / email already exists: +;; disabled = show an error +;; login = show an account linking login +;; auto = link directly with the account +;ACCOUNT_LINKING = login + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[webhook] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Hook task queue length, increase if webhook shooting starts hanging +;QUEUE_LENGTH = 1000 +;; +;; Deliver timeout in seconds +;DELIVER_TIMEOUT = 5 +;; +;; Allow insecure certification +;SKIP_TLS_VERIFY = false +;; +;; Number of history information in each page +;PAGING_NUM = 10 +;; +;; Proxy server URL, support http://, https//, socks://, blank will follow environment http_proxy/https_proxy +;PROXY_URL = +;; +;; Comma separated list of host names requiring proxy. Glob patterns (*) are accepted; use ** to match all hosts. +;PROXY_HOSTS = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[mailer] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;ENABLED = false +;; +;; Buffer length of channel, keep it as it is if you don't know what it is. +;SEND_BUFFER_LEN = 100 +;; +;; Prefix displayed before subject in mail +;SUBJECT_PREFIX = +;; +;; Mail server +;; Gmail: smtp.gmail.com:587 +;; QQ: smtp.qq.com:465 +;; As per RFC 8314 using Implicit TLS/SMTPS on port 465 (if supported) is recommended, +;; otherwise STARTTLS on port 587 should be used. +;HOST = +;; +;; Disable HELO operation when hostnames are different. +;DISABLE_HELO = +;; +;; Custom hostname for HELO operation, if no value is provided, one is retrieved from system. +;HELO_HOSTNAME = +;; +;; Whether or not to skip verification of certificates; `true` to disable verification. This option is unsafe. Consider adding the certificate to the system trust store instead. +;SKIP_VERIFY = false +;; +;; Use client certificate +;USE_CERTIFICATE = false +;CERT_FILE = custom/mailer/cert.pem +;KEY_FILE = custom/mailer/key.pem +;; +;; Should SMTP connect with TLS, (if port ends with 465 TLS will always be used.) +;; If this is false but STARTTLS is supported the connection will be upgraded to TLS opportunistically. +;IS_TLS_ENABLED = false +;; +;; Mail from address, RFC 5322. This can be just an email address, or the `"Name" ` format +;FROM = +;; +;; Mailer user name and password +;; Please Note: Authentication is only supported when the SMTP server communication is encrypted with TLS (this can be via STARTTLS) or `HOST=localhost`. +;USER = +;; +;; Use PASSWD = `your password` for quoting if you use special characters in the password. +;PASSWD = +;; +;; Send mails as plain text +;SEND_AS_PLAIN_TEXT = false +;; +;; Set Mailer Type (either SMTP, sendmail or dummy to just send to the log) +;MAILER_TYPE = smtp +;; +;; Specify an alternative sendmail binary +;SENDMAIL_PATH = sendmail +;; +;; Specify any extra sendmail arguments +;SENDMAIL_ARGS = +;; +;; Timeout for Sendmail +;SENDMAIL_TIMEOUT = 5m + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cache] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; if the cache enabled +;ENABLED = true +;; +;; Either "memory", "redis", "memcache", or "twoqueue". 
default is "memory" +;ADAPTER = memory +;; +;; For "memory" only, GC interval in seconds, default is 60 +;INTERVAL = 60 +;; +;; For "redis" and "memcache", connection host address +;; redis: network=tcp,addr=:6379,password=macaron,db=0,pool_size=100,idle_timeout=180 +;; memcache: `127.0.0.1:11211` +;; twoqueue: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` +;HOST = +;; +;; Time to keep items in cache if not used, default is 16 hours. +;; Setting it to 0 disables caching +;ITEM_TTL = 16h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Last commit cache +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cache.last_commit] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; if the cache enabled +;ENABLED = true +;; +;; Time to keep items in cache if not used, default is 8760 hours. +;; Setting it to 0 disables caching +;ITEM_TTL = 8760h +;; +;; Only enable the cache when repository's commits count great than +;COMMITS_COUNT = 1000 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[session] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Either "memory", "file", or "redis", default is "memory" +;PROVIDER = memory +;; +;; Provider config options +;; memory: doesn't have any config yet +;; file: session file path, e.g. `data/sessions` +;; redis: network=tcp,addr=:6379,password=macaron,db=0,pool_size=100,idle_timeout=180 +;; mysql: go-sql-driver/mysql dsn config string, e.g. `root:password@/session_table` +;PROVIDER_CONFIG = data/sessions +;; +;; Session cookie name +;COOKIE_NAME = i_like_gitea +;; +;; If you use session in https only, default is false +;COOKIE_SECURE = false +;; +;; Session GC time interval in seconds, default is 86400 (1 day) +;GC_INTERVAL_TIME = 86400 +;; +;; Session life time in seconds, default is 86400 (1 day) +;SESSION_LIFE_TIME = 86400 +;; +;; SameSite settings. Either "none", "lax", or "strict" +;SAME_SITE=lax + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[picture] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;AVATAR_UPLOAD_PATH = data/avatars +;REPOSITORY_AVATAR_UPLOAD_PATH = data/repo-avatars +;; +;; How Gitea deals with missing repository avatars +;; none = no avatar will be displayed; random = random avatar will be displayed; image = default image will be used +;REPOSITORY_AVATAR_FALLBACK = none +;REPOSITORY_AVATAR_FALLBACK_IMAGE = /img/repo_default.png +;; +;; Max Width and Height of uploaded avatars. +;; This is to limit the amount of RAM used when resizing the image. +;AVATAR_MAX_WIDTH = 4096 +;AVATAR_MAX_HEIGHT = 3072 +;; +;; Maximum allowed file size for uploaded avatars. +;; This is to limit the amount of RAM used when resizing the image. +;AVATAR_MAX_FILE_SIZE = 1048576 +;; +;; Chinese users can choose "duoshuo" +;; or a custom avatar source, like: http://cn.gravatar.com/avatar/ +;GRAVATAR_SOURCE = gravatar +;; +;; This value will always be true in offline mode. +;DISABLE_GRAVATAR = false +;; +;; Federated avatar lookup uses DNS to discover avatar associated +;; with emails, see https://www.libravatar.org +;; This value will always be false in offline mode or when Gravatar is disabled. 
+;ENABLE_FEDERATED_AVATAR = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[attachment] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Whether issue and pull request attachments are enabled. Defaults to `true` +;ENABLED = true +;; +;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. +;ALLOWED_TYPES = .docx,.gif,.gz,.jpeg,.jpg,.log,.pdf,.png,.pptx,.txt,.xlsx,.zip +;; +;; Max size of each file. Defaults to 4MB +;MAX_SIZE = 4 +;; +;; Max number of files per upload. Defaults to 5 +;MAX_FILES = 5 +;; +;; Storage type for attachments, `local` for local disk or `minio` for s3 compatible +;; object storage service, default is `local`. +;STORAGE_TYPE = local +;; +;; Allows the storage driver to redirect to authenticated URLs to serve files directly +;; Currently, only `minio` is supported. +;SERVE_DIRECT = false +;; +;; Path for attachments. Defaults to `data/attachments` only available when STORAGE_TYPE is `local` +;PATH = data/attachments +;; +;; Minio endpoint to connect only available when STORAGE_TYPE is `minio` +;MINIO_ENDPOINT = localhost:9000 +;; +;; Minio accessKeyID to connect only available when STORAGE_TYPE is `minio` +;MINIO_ACCESS_KEY_ID = +;; +;; Minio secretAccessKey to connect only available when STORAGE_TYPE is `minio` +;MINIO_SECRET_ACCESS_KEY = +;; +;; Minio bucket to store the attachments only available when STORAGE_TYPE is `minio` +;MINIO_BUCKET = gitea +;; +;; Minio location to create bucket only available when STORAGE_TYPE is `minio` +;MINIO_LOCATION = us-east-1 +;; +;; Minio base path on the bucket only available when STORAGE_TYPE is `minio` +;MINIO_BASE_PATH = attachments/ +;; +;; Minio enabled ssl only available when STORAGE_TYPE is `minio` +;MINIO_USE_SSL = false -[log] -ROOT_PATH = -; Either "console", "file", "conn", "smtp" or "database", default is "console" -; Use comma to separate multiple modes, e.g. "console, file" -MODE = console -; Buffer length of the channel, keep it as it is if you don't know what it is. -BUFFER_LEN = 10000 -; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Info" -ROUTER_LOG_LEVEL = Info -ROUTER = console -ENABLE_ACCESS_LOG = false -ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}" -ACCESS = file -; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Trace" -LEVEL = Info -; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "None" -STACKTRACE_LEVEL = None - -; Generic log modes -[log.x] -FLAGS = stdflags -EXPRESSION = -PREFIX = -COLORIZE = false - -; For "console" mode only -[log.console] -LEVEL = -STDERR = false - -; For "file" mode only -[log.file] -LEVEL = -; Set the file_name for the logger. 
If this is a relative path this -; will be relative to ROOT_PATH -FILE_NAME = -; This enables automated log rotate(switch of following options), default is true -LOG_ROTATE = true -; Max size shift of a single file, default is 28 means 1 << 28, 256MB -MAX_SIZE_SHIFT = 28 -; Segment log daily, default is true -DAILY_ROTATE = true -; delete the log file after n days, default is 7 -MAX_DAYS = 7 -; compress logs with gzip -COMPRESS = true -; compression level see godoc for compress/gzip -COMPRESSION_LEVEL = -1 - -; For "conn" mode only -[log.conn] -LEVEL = -; Reconnect host for every single message, default is false -RECONNECT_ON_MSG = false -; Try to reconnect when connection is lost, default is false -RECONNECT = false -; Either "tcp", "unix" or "udp", default is "tcp" -PROTOCOL = tcp -; Host address -ADDR = - -; For "smtp" mode only -[log.smtp] -LEVEL = -; Name displayed in mail title, default is "Diagnostic message from server" -SUBJECT = Diagnostic message from server -; Mail server -HOST = -; Mailer user name and password -USER = -; Use PASSWD = `your password` for quoting if you use special characters in the password. -PASSWD = -; Receivers, can be one or more, e.g. 1@example.com,2@example.com -RECEIVERS = - -[cron] -; Enable running all cron tasks periodically with default settings. -ENABLED = false -; Run cron tasks when Gitea starts. -RUN_AT_START = false - -; Basic cron tasks - enabled by default - -; Clean up old repository archives -[cron.archive_cleanup] -; Whether to enable the job -ENABLED = true -; Whether to always run at least once at start up time (if ENABLED) -RUN_AT_START = true -; Notice if not success -NO_SUCCESS_NOTICE = false -; Time interval for job to run -SCHEDULE = @every 24h -; Archives created more than OLDER_THAN ago are subject to deletion -OLDER_THAN = 24h - -; Update mirrors -[cron.update_mirrors] -SCHEDULE = @every 10m -; Enable running Update mirrors task periodically. -ENABLED = true -; Run Update mirrors task when Gitea starts. -RUN_AT_START = false -; Notice if not success -NO_SUCCESS_NOTICE = true - -; Repository health check -[cron.repo_health_check] -SCHEDULE = @every 24h -; Enable running Repository health check task periodically. -ENABLED = true -; Run Repository health check task when Gitea starts. -RUN_AT_START = false -; Notice if not success -NO_SUCCESS_NOTICE = false -TIMEOUT = 60s -; Arguments for command 'git fsck', e.g. "--unreachable --tags" -; see more on http://git-scm.com/docs/git-fsck -ARGS = +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[time] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Specifies the format for fully outputted dates. Defaults to RFC1123 +;; Special supported values are ANSIC, UnixDate, RubyDate, RFC822, RFC822Z, RFC850, RFC1123, RFC1123Z, RFC3339, RFC3339Nano, Kitchen, Stamp, StampMilli, StampMicro and StampNano +;; For more information about the format see http://golang.org/pkg/time/#pkg-constants +;FORMAT = +;; +;; Location the UI time display i.e. Asia/Shanghai +;; Empty means server's location setting +;DEFAULT_UI_LOCATION = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Common settings +;; +;; Setting this to true will enable all cron tasks periodically with default settings. 
+;ENABLED = false +;; Setting this to true will run all enabled cron tasks when Gitea starts. +;RUN_AT_START = false +;; +;; Note: ``SCHEDULE`` accept formats +;; - Full crontab specs, e.g. "* * * * * ?" +;; - Descriptors, e.g. "@midnight", "@every 1h30m" +;; See more: https://pkg.go.dev/github.com/gogs/cron@v0.0.0-20171120032916-9f6c956d3e14 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Basic cron tasks - enabled by default +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Clean up old repository archives +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.archive_cleanup] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Whether to enable the job +;ENABLED = true +;; Whether to always run at least once at start up time (if ENABLED) +;RUN_AT_START = true +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;; Time interval for job to run +;SCHEDULE = @every 24h +;; Archives created more than OLDER_THAN ago are subject to deletion +;OLDER_THAN = 24h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Update mirrors +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.update_mirrors] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;SCHEDULE = @every 10m +;; Enable running Update mirrors task periodically. +;ENABLED = true +;; Run Update mirrors task when Gitea starts. +;RUN_AT_START = false +;; Notice if not success +;NO_SUCCESS_NOTICE = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Repository health check +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.repo_health_check] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;SCHEDULE = @every 24h +;; Enable running Repository health check task periodically. +;ENABLED = true +;; Run Repository health check task when Gitea starts. +;RUN_AT_START = false +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;TIMEOUT = 60s +;; Arguments for command 'git fsck', e.g. "--unreachable --tags" +;; see more on http://git-scm.com/docs/git-fsck +;ARGS = ; A list of repos which shouldn't be health-checked, e.g. "user1/repo1 user2/repo2" SKIP_REPOS = -; Check repository statistics -[cron.check_repo_stats] -; Enable running check repository statistics task periodically. -ENABLED = true -; Run check repository statistics task when Gitea starts. -RUN_AT_START = true -; Notice if not success -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 24h - -[cron.update_migration_poster_id] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Check repository statistics +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.check_repo_stats] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Enable running check repository statistics task periodically. +;ENABLED = true +;; Run check repository statistics task when Gitea starts. 
+;RUN_AT_START = true +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 24h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.update_migration_poster_id] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; Update migrated repositories' issues and comments' posterid, it will always attempt synchronization when the instance starts. -ENABLED = true -; Update migrated repositories' issues and comments' posterid when starting server (default true) -RUN_AT_START = true -; Notice if not success -NO_SUCCESS_NOTICE = false -; Interval as a duration between each synchronization. (default every 24h) -SCHEDULE = @every 24h - -; Synchronize external user data (only LDAP user synchronization is supported) -[cron.sync_external_users] -ENABLED = true -; Synchronize external user data when starting server (default false) -RUN_AT_START = false -; Notice if not success -NO_SUCCESS_NOTICE = false -; Interval as a duration between each synchronization (default every 24h) -SCHEDULE = @every 24h -; Create new users, update existing user data and disable users that are not in external source anymore (default) -; or only create new users if UPDATE_EXISTING is set to false -UPDATE_EXISTING = true - -; Clean-up deleted branches -[cron.deleted_branches_cleanup] -ENABLED = true -; Clean-up deleted branches when starting server (default true) -RUN_AT_START = true -; Notice if not success -NO_SUCCESS_NOTICE = false -; Interval as a duration between each synchronization (default every 24h) -SCHEDULE = @every 24h -; deleted branches than OLDER_THAN ago are subject to deletion -OLDER_THAN = 24h - -; Cleanup hook_task table -[cron.cleanup_hook_task_table] -; Whether to enable the job -ENABLED = true -; Whether to always run at start up time (if ENABLED) -RUN_AT_START = false -; Time interval for job to run -SCHEDULE = @every 24h -; OlderThan or PerWebhook. How the records are removed, either by age (i.e. how long ago hook_task record was delivered) or by the number to keep per webhook (i.e. keep most recent x deliveries per webhook). -CLEANUP_TYPE = OlderThan -; If CLEANUP_TYPE is set to OlderThan, then any delivered hook_task records older than this expression will be deleted. -OLDER_THAN = 168h -; If CLEANUP_TYPE is set to PerWebhook, this is number of hook_task records to keep for a webhook (i.e. keep the most recent x deliveries). -NUMBER_TO_KEEP = 10 - +;ENABLED = true +;; Update migrated repositories' issues and comments' posterid when starting server (default true) +;RUN_AT_START = true +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;; Interval as a duration between each synchronization. 
(default every 24h) +;SCHEDULE = @every 24h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Synchronize external user data (only LDAP user synchronization is supported) +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.sync_external_users] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = true +;; Synchronize external user data when starting server (default false) +;RUN_AT_START = false +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;; Interval as a duration between each synchronization (default every 24h) +;SCHEDULE = @every 24h +;; Create new users, update existing user data and disable users that are not in external source anymore (default) +;; or only create new users if UPDATE_EXISTING is set to false +;UPDATE_EXISTING = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Clean-up deleted branches +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.deleted_branches_cleanup] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = true +;; Clean-up deleted branches when starting server (default true) +;RUN_AT_START = true +;; Notice if not success +;NO_SUCCESS_NOTICE = false +;; Interval as a duration between each synchronization (default every 24h) +;SCHEDULE = @every 24h +;; deleted branches than OLDER_THAN ago are subject to deletion +;OLDER_THAN = 24h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Cleanup hook_task table +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.cleanup_hook_task_table] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Whether to enable the job +;ENABLED = true +;; Whether to always run at start up time (if ENABLED) +;RUN_AT_START = false +;; Time interval for job to run +;SCHEDULE = @every 24h +;; OlderThan or PerWebhook. How the records are removed, either by age (i.e. how long ago hook_task record was delivered) or by the number to keep per webhook (i.e. keep most recent x deliveries per webhook). +;CLEANUP_TYPE = OlderThan +;; If CLEANUP_TYPE is set to OlderThan, then any delivered hook_task records older than this expression will be deleted. +;OLDER_THAN = 168h +;; If CLEANUP_TYPE is set to PerWebhook, this is number of hook_task records to keep for a webhook (i.e. keep the most recent x deliveries). 
+;NUMBER_TO_KEEP = 10 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; Extended cron task - not enabled by default - -; Delete all unactivated accounts -[cron.delete_inactive_accounts] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @annually -OLDER_THAN = 168h - -; Delete all repository archives -[cron.delete_repo_archives] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @annually - -; Garbage collect all repositories -[cron.git_gc_repos] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h -TIMEOUT = 60s -; Arguments for command 'git gc' -; The default value is same with [git] -> GC_ARGS -ARGS = - -; Update the '.ssh/authorized_keys' file with Gitea SSH keys -[cron.resync_all_sshkeys] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h - -; Resynchronize pre-receive, update and post-receive hooks of all repositories. -[cron.resync_all_hooks] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h - -; Reinitialize all missing Git repositories for which records exist -[cron.reinit_missing_repos] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h - -; Delete all repositories missing their Git files -[cron.delete_missing_repos] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h - -; Delete generated repository avatars -[cron.delete_generated_repository_avatars] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 72h - -; Delete all old actions from database -[cron.delete_old_actions] -ENABLED = false -RUN_AT_START = false -NO_SUCCESS_NOTICE = false -SCHEDULE = @every 168h -OLDER_THAN = 8760h - -[git] -; The path of git executable. If empty, Gitea searches through the PATH environment. -PATH = -; Disables highlight of added and removed changes -DISABLE_DIFF_HIGHLIGHT = false -; Max number of lines allowed in a single file in diff view -MAX_GIT_DIFF_LINES = 1000 -; Max number of allowed characters in a line in diff view -MAX_GIT_DIFF_LINE_CHARACTERS = 5000 -; Max number of files shown in diff view -MAX_GIT_DIFF_FILES = 100 -; Set the default commits range size -COMMITS_RANGE_SIZE = 50 -; Set the default branches range size -BRANCHES_RANGE_SIZE = 20 -; Arguments for command 'git gc', e.g. "--aggressive --auto" -; see more on http://git-scm.com/docs/git-gc/ -GC_ARGS = -; If use git wire protocol version 2 when git version >= 2.18, default is true, set to false when you always want git wire protocol version 1 -ENABLE_AUTO_GIT_WIRE_PROTOCOL = true -; Respond to pushes to a non-default branch with a URL for creating a Pull Request (if the repository has them enabled) -PULL_REQUEST_PUSH_MESSAGE = true - -; Operation timeout in seconds -[git.timeout] -DEFAULT = 360 -MIGRATE = 600 -MIRROR = 300 -CLONE = 300 -PULL = 300 -GC = 60 - -[mirror] -; Default interval as a duration between each check -DEFAULT_INTERVAL = 8h -; Min interval as a duration must be > 1m -MIN_INTERVAL = 10m - -[api] -; Enables Swagger. True or false; default is true. 
-ENABLE_SWAGGER = true -; Max number of items in a page -MAX_RESPONSE_ITEMS = 50 -; Default paging number of api -DEFAULT_PAGING_NUM = 30 -; Default and maximum number of items per page for git trees api -DEFAULT_GIT_TREES_PER_PAGE = 1000 -; Default size of a blob returned by the blobs API (default is 10MiB) -DEFAULT_MAX_BLOB_SIZE = 10485760 - -[oauth2] -; Enables OAuth2 provider -ENABLE = true -; Lifetime of an OAuth2 access token in seconds -ACCESS_TOKEN_EXPIRATION_TIME = 3600 -; Lifetime of an OAuth2 refresh token in hours -REFRESH_TOKEN_EXPIRATION_TIME = 730 -; Check if refresh token got already used -INVALIDATE_REFRESH_TOKENS = false -; OAuth2 authentication secret for access and refresh tokens, change this yourself to a unique string. CLI generate option is helpful in this case. https://docs.gitea.io/en-us/command-line/#generate -JWT_SECRET = -; Maximum length of oauth2 token/cookie stored on server -MAX_TOKEN_LENGTH = 32767 - -[i18n] -LANGS = en-US,zh-CN,zh-HK,zh-TW,de-DE,fr-FR,nl-NL,lv-LV,ru-RU,uk-UA,ja-JP,es-ES,pt-BR,pt-PT,pl-PL,bg-BG,it-IT,fi-FI,tr-TR,cs-CZ,sr-SP,sv-SE,ko-KR -NAMES = English,简体中文,繁體中文(香港),繁體中文(台灣),Deutsch,français,Nederlands,latviešu,русский,Українська,日本語,español,português do Brasil,Português de Portugal,polski,български,italiano,suomi,Türkçe,čeština,српски,svenska,한국어 - -[U2F] -; NOTE: THE DEFAULT VALUES HERE WILL NEED TO BE CHANGED -; Two Factor authentication with security keys -; https://developers.yubico.com/U2F/App_ID.html -;APP_ID = http://localhost:3000/ -; Comma separated list of trusted facets -;TRUSTED_FACETS = http://localhost:3000/ - -; Extension mapping to highlight class -; e.g. .toml=ini -[highlight.mapping] - -[other] -SHOW_FOOTER_BRANDING = false -; Show version information about Gitea and Go in the footer -SHOW_FOOTER_VERSION = true -; Show template execution time in the footer -SHOW_FOOTER_TEMPLATE_LOAD_TIME = true - -[markup.sanitizer.1] -; The following keys can appear once to define a sanitation policy rule. -; This section can appear multiple times by adding a unique alphanumeric suffix to define multiple rules. 
-; e.g., [markup.sanitizer.1] -> [markup.sanitizer.2] -> [markup.sanitizer.TeX] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete all unactivated accounts +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_inactive_accounts] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @annually +;OLDER_THAN = 168h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete all repository archives +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_repo_archives] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @annually; + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Garbage collect all repositories +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.git_gc_repos] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h +;TIMEOUT = 60s +;; Arguments for command 'git gc' +;; The default value is same with [git] -> GC_ARGS +;ARGS = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Update the '.ssh/authorized_keys' file with Gitea SSH keys +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.resync_all_sshkeys] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Resynchronize pre-receive, update and post-receive hooks of all repositories. 
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.resync_all_hooks] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Reinitialize all missing Git repositories for which records exist +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.reinit_missing_repos] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete all repositories missing their Git files +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_missing_repos] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete generated repository avatars +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_generated_repository_avatars] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 72h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete all old actions from database +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_old_actions] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false +;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 168h +;OLDER_THAN = 8760h + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Git Operation timeout in seconds +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[git.timeout] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;DEFAULT = 360 +;MIGRATE = 600 +;MIRROR = 300 +;CLONE = 300 +;PULL = 300 +;GC = 60 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[mirror] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Default interval as a duration between each check +;DEFAULT_INTERVAL = 8h +;; Min interval as a duration must be > 1m +;MIN_INTERVAL = 10m + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[api] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Enables Swagger. True or false; default is true.
+;ENABLE_SWAGGER = true +;; Max number of items in a page +;MAX_RESPONSE_ITEMS = 50 +;; Default paging number of api +;DEFAULT_PAGING_NUM = 30 +;; Default and maximum number of items per page for git trees api +;DEFAULT_GIT_TREES_PER_PAGE = 1000 +;; Default size of a blob returned by the blobs API (default is 10MiB) +;DEFAULT_MAX_BLOB_SIZE = 10485760 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[i18n] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;LANGS = en-US,zh-CN,zh-HK,zh-TW,de-DE,fr-FR,nl-NL,lv-LV,ru-RU,uk-UA,ja-JP,es-ES,pt-BR,pt-PT,pl-PL,bg-BG,it-IT,fi-FI,tr-TR,cs-CZ,sr-SP,sv-SE,ko-KR +;NAMES = English,简体中文,繁體中文(香港),繁體中文(台灣),Deutsch,français,Nederlands,latviešu,русский,Українська,日本語,español,português do Brasil,Português de Portugal,polski,български,italiano,suomi,Türkçe,čeština,српски,svenska,한국어 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[highlight.mapping] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Extension mapping to highlight class +;; e.g. .toml=ini + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[other] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;SHOW_FOOTER_BRANDING = false +;; Show version information about Gitea and Go in the footer +;SHOW_FOOTER_VERSION = true +;; Show template execution time in the footer +;SHOW_FOOTER_TEMPLATE_LOAD_TIME = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[markup.sanitizer.1] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; The following keys can appear once to define a sanitation policy rule. +;; This section can appear multiple times by adding a unique alphanumeric suffix to define multiple rules. +;; e.g., [markup.sanitizer.1] -> [markup.sanitizer.2] -> [markup.sanitizer.TeX] ;ELEMENT = span ;ALLOW_ATTR = class ;REGEXP = ^(info|warning|error)$ - -[markup.asciidoc] -ENABLED = false -; List of file extensions that should be rendered by an external command -FILE_EXTENSIONS = .adoc,.asciidoc -; External command to render all matching extensions -RENDER_COMMAND = "asciidoc --out-file=- -" -; Don't pass the file on STDIN, pass the filename as argument instead. -IS_INPUT_FILE = false - -[metrics] -; Enables metrics endpoint. True or false; default is false. -ENABLED = false -; If you want to add authorization, specify a token here -TOKEN = - -[task] -; Task queue type, could be `channel` or `redis`. -QUEUE_TYPE = channel -; Task queue length, available only when `QUEUE_TYPE` is `channel`. -QUEUE_LENGTH = 1000 -; Task queue connection string, available only when `QUEUE_TYPE` is `redis`. -; If there is a password of redis, use `addrs=127.0.0.1:6379 password=123 db=0`. -QUEUE_CONN_STR = "addrs=127.0.0.1:6379 db=0" - -[migrations] -; Max attempts per http/https request on migrations. -MAX_ATTEMPTS = 3 -; Backoff time per http/https request retry (seconds) -RETRY_BACKOFF = 3 -; Allowed domains for migrating, default is blank. Blank means everything will be allowed. -; Multiple domains could be separated by commas. -ALLOWED_DOMAINS = -; Blocklist for migrating, default is blank. Multiple domains could be separated by commas. 
-; When ALLOWED_DOMAINS is not blank, this option will be ignored. -BLOCKED_DOMAINS = -; Allow private addresses defined by RFC 1918, RFC 1122, RFC 4632 and RFC 4291 (false by default) -ALLOW_LOCALNETWORKS = false - -; default storage for attachments, lfs and avatars -[storage] -; storage type -STORAGE_TYPE = local - -; lfs storage will override storage -[lfs] -STORAGE_TYPE = local - -; customize storage +;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Other markup formats e.g. asciidoc +;; +;; uncomment and enable the below section. +;; (You can add other markup formats by copying the section and adjusting +;; the section name suffix "asciidoc" to something else.) +;[markup.asciidoc] +;ENABLED = false +;; List of file extensions that should be rendered by an external command +;FILE_EXTENSIONS = .adoc,.asciidoc +;; External command to render all matching extensions +;RENDER_COMMAND = "asciidoc --out-file=- -" +;; Don't pass the file on STDIN, pass the filename as argument instead. +;IS_INPUT_FILE = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[metrics] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Enables metrics endpoint. True or false; default is false. +;ENABLED = false +;; If you want to add authorization, specify a token here +;TOKEN = + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[task] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Task queue type, could be `channel` or `redis`. +;QUEUE_TYPE = channel +;; +;; Task queue length, available only when `QUEUE_TYPE` is `channel`. +;QUEUE_LENGTH = 1000 +;; +;; Task queue connection string, available only when `QUEUE_TYPE` is `redis`. +;; If there is a password of redis, use `addrs=127.0.0.1:6379 password=123 db=0`. +;QUEUE_CONN_STR = "addrs=127.0.0.1:6379 db=0" + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[migrations] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Max attempts per http/https request on migrations. +;MAX_ATTEMPTS = 3 +;; +;; Backoff time per http/https request retry (seconds) +;RETRY_BACKOFF = 3 +;; +;; Allowed domains for migrating, default is blank. Blank means everything will be allowed. +;; Multiple domains could be separated by commas. +;ALLOWED_DOMAINS = +;; +;; Blocklist for migrating, default is blank. Multiple domains could be separated by commas. +;; When ALLOWED_DOMAINS is not blank, this option will be ignored. 
+;BLOCKED_DOMAINS = +;; +;; Allow private addresses defined by RFC 1918, RFC 1122, RFC 4632 and RFC 4291 (false by default) +;ALLOW_LOCALNETWORKS = false + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; default storage for attachments, lfs and avatars +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[storage] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; storage type +;STORAGE_TYPE = local + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; settings for repository archives, will override storage setting +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[storage.repo-archive] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; storage type +;STORAGE_TYPE = local + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; lfs storage will override storage +;; +;[lfs] +;STORAGE_TYPE = local + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; customize storage ;[storage.my_minio] ;STORAGE_TYPE = minio -; Minio endpoint to connect only available when STORAGE_TYPE is `minio` +;; +;; Minio endpoint to connect only available when STORAGE_TYPE is `minio` ;MINIO_ENDPOINT = localhost:9000 -; Minio accessKeyID to connect only available when STORAGE_TYPE is `minio` +;; +;; Minio accessKeyID to connect only available when STORAGE_TYPE is `minio` ;MINIO_ACCESS_KEY_ID = -; Minio secretAccessKey to connect only available when STORAGE_TYPE is `minio` +;; +;; Minio secretAccessKey to connect only available when STORAGE_TYPE is `minio` ;MINIO_SECRET_ACCESS_KEY = -; Minio bucket to store the attachments only available when STORAGE_TYPE is `minio` +;; +;; Minio bucket to store the attachments only available when STORAGE_TYPE is `minio` ;MINIO_BUCKET = gitea -; Minio location to create bucket only available when STORAGE_TYPE is `minio` +;; +;; Minio location to create bucket only available when STORAGE_TYPE is `minio` ;MINIO_LOCATION = us-east-1 -; Minio enabled ssl only available when STORAGE_TYPE is `minio` +;; +;; Minio enabled ssl only available when STORAGE_TYPE is `minio` ;MINIO_USE_SSL = false diff --git a/docker/manifest.rootless.tmpl b/docker/manifest.rootless.tmpl index aed36caa4e87..1d14041ff276 100644 --- a/docker/manifest.rootless.tmpl +++ b/docker/manifest.rootless.tmpl @@ -1,18 +1,19 @@ -image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}-rootless +image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}dev{{/if}}-rootless {{#if build.tags}} tags: {{#each build.tags}} - {{this}}-rootless {{/each}} + - "latest-rootless" {{/if}} manifests: - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64-rootless + image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}dev{{/if}}-linux-amd64-rootless platform: architecture: amd64 os: linux - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64-rootless + image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}dev{{/if}}-linux-arm64-rootless platform: architecture: arm64 os: linux diff --git a/docker/manifest.tmpl b/docker/manifest.tmpl index 9678449628a1..43a57f7f2722 100644 --- a/docker/manifest.tmpl +++ b/docker/manifest.tmpl @@ -1,19 +1,20 @@ -image: 
gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}dev{{/if}} {{#if build.tags}} tags: {{#each build.tags}} - {{this}} {{/each}} + - "latest" {{/if}} manifests: - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{else}}dev-{{/if}}linux-amd64 platform: architecture: amd64 os: linux - - image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + image: gitea/gitea:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{else}}dev-{{/if}}linux-arm64 platform: architecture: arm64 os: linux - variant: v8 + variant: v8 \ No newline at end of file diff --git a/docker/root/etc/s6/gitea/setup b/docker/root/etc/s6/gitea/setup index 4127025dd019..f28bc68bdcef 100755 --- a/docker/root/etc/s6/gitea/setup +++ b/docker/root/etc/s6/gitea/setup @@ -23,7 +23,7 @@ if [ ! -f ${GITEA_CUSTOM}/conf/app.ini ]; then INSTALL_LOCK=true fi - # Substitude the environment variables in the template + # Substitute the environment variables in the template APP_NAME=${APP_NAME:-"Gitea: Git with a cup of tea"} \ RUN_MODE=${RUN_MODE:-"prod"} \ DOMAIN=${DOMAIN:-"localhost"} \ diff --git a/docker/root/etc/s6/openssh/setup b/docker/root/etc/s6/openssh/setup index 2a5eb9b09f0a..89c03092be17 100755 --- a/docker/root/etc/s6/openssh/setup +++ b/docker/root/etc/s6/openssh/setup @@ -24,9 +24,31 @@ if [ ! -f /data/ssh/ssh_host_ecdsa_key ]; then ssh-keygen -t ecdsa -b 256 -f /data/ssh/ssh_host_ecdsa_key -N "" > /dev/null fi +if [ -e /data/ssh/ssh_host_ed25519_cert ]; then + SSH_ED25519_CERT=${SSH_ED25519_CERT:-"/data/ssh/ssh_host_ed25519_cert"} +fi + +if [ -e /data/ssh/ssh_host_rsa_cert ]; then + SSH_RSA_CERT=${SSH_RSA_CERT:-"/data/ssh/ssh_host_rsa_cert"} +fi + +if [ -e /data/ssh/ssh_host_ecdsa_cert ]; then + SSH_ECDSA_CERT=${SSH_ECDSA_CERT:-"/data/ssh/ssh_host_ecdsa_cert"} +fi + +if [ -e /data/ssh/ssh_host_dsa_cert ]; then + SSH_DSA_CERT=${SSH_DSA_CERT:-"/data/ssh/ssh_host_dsa_cert"} +fi + if [ -d /etc/ssh ]; then SSH_PORT=${SSH_PORT:-"22"} \ SSH_LISTEN_PORT=${SSH_LISTEN_PORT:-"${SSH_PORT}"} \ + SSH_ED25519_CERT="${SSH_ED25519_CERT:+"HostCertificate "}${SSH_ED25519_CERT}" \ + SSH_RSA_CERT="${SSH_RSA_CERT:+"HostCertificate "}${SSH_RSA_CERT}" \ + SSH_ECDSA_CERT="${SSH_ECDSA_CERT:+"HostCertificate "}${SSH_ECDSA_CERT}" \ + SSH_DSA_CERT="${SSH_DSA_CERT:+"HostCertificate "}${SSH_DSA_CERT}" \ + SSH_MAX_STARTUPS="${SSH_MAX_STARTUPS:+"MaxStartups "}${SSH_MAX_STARTUPS}" \ + SSH_MAX_SESSIONS="${SSH_MAX_SESSIONS:+"MaxSessions "}${SSH_MAX_SESSIONS}" \ envsubst < /etc/templates/sshd_config > /etc/ssh/sshd_config chmod 0644 /etc/ssh/sshd_config diff --git a/docker/root/etc/templates/sshd_config b/docker/root/etc/templates/sshd_config index 26e26feb4127..8d336f3a8eda 100644 --- a/docker/root/etc/templates/sshd_config +++ b/docker/root/etc/templates/sshd_config @@ -5,16 +5,19 @@ AddressFamily any ListenAddress 0.0.0.0 ListenAddress :: +${SSH_MAX_STARTUPS} +${SSH_MAX_SESSIONS} + LogLevel INFO HostKey /data/ssh/ssh_host_ed25519_key -HostCertificate /data/ssh/ssh_host_ed25519_cert +${SSH_ED25519_CERT} HostKey /data/ssh/ssh_host_rsa_key -HostCertificate /data/ssh/ssh_host_rsa_cert +${SSH_RSA_CERT} HostKey /data/ssh/ssh_host_ecdsa_key -HostCertificate /data/ssh/ssh_host_ecdsa_cert +${SSH_ECDSA_CERT} HostKey /data/ssh/ssh_host_dsa_key -HostCertificate /data/ssh/ssh_host_dsa_cert +${SSH_DSA_CERT} AuthorizedKeysFile .ssh/authorized_keys 
AuthorizedPrincipalsFile .ssh/authorized_principals diff --git a/docker/rootless/usr/local/bin/docker-setup.sh b/docker/rootless/usr/local/bin/docker-setup.sh index ef86d01c9f30..47645726c4b0 100755 --- a/docker/rootless/usr/local/bin/docker-setup.sh +++ b/docker/rootless/usr/local/bin/docker-setup.sh @@ -25,7 +25,7 @@ if [ ! -f ${GITEA_APP_INI} ]; then INSTALL_LOCK=true fi - # Substitude the environment variables in the template + # Substitute the environment variables in the template APP_NAME=${APP_NAME:-"Gitea: Git with a cup of tea"} \ RUN_MODE=${RUN_MODE:-"prod"} \ RUN_USER=${USER:-"git"} \ diff --git a/docs/Makefile b/docs/Makefile index 487e16cf6fc2..68afe03e75fe 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -31,4 +31,4 @@ update: $(THEME) $(THEME): $(THEME)/theme.toml $(THEME)/theme.toml: mkdir -p $$(dirname $@) - curl -s $(ARCHIVE) | tar xz -C $$(dirname $@) + curl -L -s $(ARCHIVE) | tar xz -C $$(dirname $@) diff --git a/docs/config.yaml b/docs/config.yaml index a7b31f3f007e..451984e09d7c 100644 --- a/docs/config.yaml +++ b/docs/config.yaml @@ -18,7 +18,7 @@ params: description: Git with a cup of tea author: The Gitea Authors website: https://docs.gitea.io - version: 1.14.1 + version: 1.14.4 minGoVersion: 1.14 goVersion: 1.16 minNodeVersion: 12.17 diff --git a/docs/content/doc/advanced/config-cheat-sheet.en-us.md b/docs/content/doc/advanced/config-cheat-sheet.en-us.md index fa532c37408f..3bc77c4e251a 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.en-us.md +++ b/docs/content/doc/advanced/config-cheat-sheet.en-us.md @@ -94,10 +94,11 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `REOPEN_KEYWORDS`: **reopen**, **reopens**, **reopened**: List of keywords used in Pull Request comments to automatically reopen a related issue - `DEFAULT_MERGE_MESSAGE_COMMITS_LIMIT`: **50**: In the default merge message for squash commits include at most this many commits. Set to `-1` to include all commits -- `DEFAULT_MERGE_MESSAGE_SIZE`: **5120**: In the default merge message for squash commits limit the size of the commit messages. Set to `-1` to have no limit. +- `DEFAULT_MERGE_MESSAGE_SIZE`: **5120**: In the default merge message for squash commits limit the size of the commit messages. Set to `-1` to have no limit. Only used if `POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES` is `true`. - `DEFAULT_MERGE_MESSAGE_ALL_AUTHORS`: **false**: In the default merge message for squash commits walk all commits to include all authors in the Co-authored-by otherwise just use those in the limited list - `DEFAULT_MERGE_MESSAGE_MAX_APPROVERS`: **10**: In default merge messages limit the number of approvers listed as `Reviewed-by:`. Set to `-1` to include all. - `DEFAULT_MERGE_MESSAGE_OFFICIAL_APPROVERS_ONLY`: **true**: In default merge messages only include approvers who are officially allowed to review. +- `POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES`: **false**: In default squash-merge messages include the commit message of all commits comprising the pull request. ### Repository - Issue (`repository.issue`) @@ -127,8 +128,8 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - Options other than `never` and `always` can be combined as a comma separated list. - `DEFAULT_TRUST_MODEL`: **collaborator**: \[collaborator, committer, collaboratorcommitter\]: The default trust model used for verifying commits. - `collaborator`: Trust signatures signed by keys of collaborators. 
- - `committer`: Trust signatures that match committers (This matches GitHub and will force Gitea signed commits to have Gitea as the commmitter). - - `collaboratorcommitter`: Trust signatures signed by keys of collaborators which match the commiter. + - `committer`: Trust signatures that match committers (This matches GitHub and will force Gitea signed commits to have Gitea as the committer). + - `collaboratorcommitter`: Trust signatures signed by keys of collaborators which match the committer. - `WIKI`: **never**: \[never, pubkey, twofa, always, parentsigned\]: Sign commits to wiki. - `CRUD_ACTIONS`: **pubkey, twofa, parentsigned**: \[never, pubkey, twofa, parentsigned, always\]: Sign CRUD actions. - Options as above, with the addition of: @@ -143,6 +144,15 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `LOCAL_COPY_PATH`: **tmp/local-repo**: Path for temporary local repository copies. Defaults to `tmp/local-repo` +## Repository - MIME type mapping (`repository.mimetype_mapping`) + +Configuration for setting the expected MIME type based on the file extensions of downloadable files. The configuration is presented as key-value pairs, and file extensions start with a leading `.`. + +The following configuration sets the `Content-Type: application/vnd.android.package-archive` header when downloading files with the `.apk` file extension. +```ini +.apk=application/vnd.android.package-archive +``` + ## CORS (`cors`) - `ENABLED`: **false**: enable cors headers (disabled by default) @@ -170,7 +180,10 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `MAX_DISPLAY_FILE_SIZE`: **8388608**: Max size of files to be displayed (default is 8MiB) - `REACTIONS`: All available reactions users can choose on issues/prs and comments Values can be emoji alias (:smile:) or a unicode emoji. - For custom reactions, add a tightly cropped square image to public/emoji/img/reaction_name.png + For custom reactions, add a tightly cropped square image to public/img/emoji/reaction_name.png +- `CUSTOM_EMOJIS`: **gitea, codeberg, gitlab, git, github, gogs**: Additional Emojis not defined in the utf8 standard. + By default we support gitea (:gitea:); to add more, copy them to public/img/emoji/emoji_name.png and + add them to this config. - `DEFAULT_SHOW_FULL_NAME`: **false**: Whether the full name of the users should be shown where possible. If the full name isn't set, the username will be used. - `SEARCH_REPO_DESCRIPTION`: **true**: Whether to search within description at repository search on explore page. - `USE_SERVICE_WORKER`: **true**: Whether to enable a Service Worker to cache frontend assets. @@ -242,6 +255,9 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. most cases you do not need to change the default value. Alter it only if your SSH server node is not the same as HTTP node. Do not set this variable if `PROTOCOL` is set to `unix`. +- `PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the connection. (Set to 0 to + disable all timeouts.) +- `PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to connections. - `DISABLE_SSH`: **false**: Disable SSH feature when it's not available. - `START_SSH_SERVER`: **false**: When enabled, use the built-in SSH server. @@ -258,6 +274,7 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `SSH_AUTHORIZED_PRINCIPALS_ALLOW`: **off** or **username, email**: \[off, username, email, anything\]: Specify the principals values that users are allowed to use as principal.
When set to `anything` no checks are done on the principal string. When set to `off` authorized principal are not allowed to be set. - `SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE`: **false/true**: Gitea will create a authorized_principals file by default when it is not using the internal ssh server and `SSH_AUTHORIZED_PRINCIPALS_ALLOW` is not `off`. - `SSH_AUTHORIZED_PRINCIPALS_BACKUP`: **false/true**: Enable SSH Authorized Principals Backup when rewriting all keys, default is true if `SSH_AUTHORIZED_PRINCIPALS_ALLOW` is not `off`. +- `SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE`: **{{.AppPath}} --config={{.CustomConf}} serv key-{{.Key.ID}}**: Set the template for the command to passed on authorized keys. Possible keys are: AppPath, AppWorkPath, CustomConf, CustomPath, Key - where Key is a `models.PublicKey` and the others are strings which are shellquoted. - `SSH_SERVER_CIPHERS`: **aes128-ctr, aes192-ctr, aes256-ctr, aes128-gcm@openssh.com, arcfour256, arcfour128**: For the built-in SSH server, choose the ciphers to support for SSH connections, for system SSH this setting has no effect. - `SSH_SERVER_KEY_EXCHANGES`: **diffie-hellman-group1-sha1, diffie-hellman-group14-sha1, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, curve25519-sha256@libssh.org**: For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, for system SSH this setting has no effect. - `SSH_SERVER_MACS`: **hmac-sha2-256-etm@openssh.com, hmac-sha2-256, hmac-sha1, hmac-sha1-96**: For the built-in SSH server, choose the MACs to support for SSH connections, for system SSH this setting has no effect @@ -265,6 +282,9 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `SSH_KEY_TEST_PATH`: **/tmp**: Directory to create temporary files in when testing public keys using ssh-keygen, default is the system temporary directory. - `SSH_KEYGEN_PATH`: **ssh-keygen**: Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call. - `SSH_EXPOSE_ANONYMOUS`: **false**: Enable exposure of SSH clone URL to anonymous visitors, default is false. +- `SSH_PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the SSH connections. (Set to + 0 to disable all timeouts.) +- `SSH_PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to SSH connections. - `MINIMUM_KEY_SIZE_CHECK`: **true**: Indicate whether to check minimum key size with corresponding type. - `OFFLINE_MODE`: **false**: Disables use of CDN for static files and Gravatar for profile pictures. @@ -325,9 +345,9 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `PATH`: **data/gitea.db**: For SQLite3 only, the database file path. - `LOG_SQL`: **true**: Log the executed SQL. - `DB_RETRIES`: **10**: How many ORM init / DB connect attempts allowed. -- `DB_RETRY_BACKOFF`: **3s**: time.Duration to wait before trying another ORM init / DB connect attempt, if failure occured. +- `DB_RETRY_BACKOFF`: **3s**: time.Duration to wait before trying another ORM init / DB connect attempt, if failure occurred. - `MAX_OPEN_CONNS` **0**: Database maximum open connections - default is 0, meaning there is no limit. -- `MAX_IDLE_CONNS` **2**: Max idle database connections on connnection pool, default is 2 - this will be capped to `MAX_OPEN_CONNS`. +- `MAX_IDLE_CONNS` **2**: Max idle database connections on connection pool, default is 2 - this will be capped to `MAX_OPEN_CONNS`. 
- `CONN_MAX_LIFETIME` **0 or 3s**: Sets the maximum amount of time a DB connection may be reused - default is 0, meaning there is no limit (except on MySQL where it is 3s - see #6804 & #7071). Please see #8540 & #8273 for further discussion of the appropriate values for `MAX_OPEN_CONNS`, `MAX_IDLE_CONNS` & `CONN_MAX_LIFETIME` and their @@ -340,10 +360,10 @@ relation to port exhaustion. - `ISSUE_INDEXER_NAME`: **gitea_issues**: Issue indexer name, available when ISSUE_INDEXER_TYPE is elasticsearch - `ISSUE_INDEXER_PATH`: **indexers/issues.bleve**: Index file used for issue search; available when ISSUE_INDEXER_TYPE is bleve and elasticsearch. - The next 4 configuration values are deprecated and should be set in `queue.issue_indexer` however are kept for backwards compatibility: -- `ISSUE_INDEXER_QUEUE_TYPE`: **levelqueue**: Issue indexer queue, currently supports:`channel`, `levelqueue`, `redis`. -- `ISSUE_INDEXER_QUEUE_DIR`: **indexers/issues.queue**: When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this will be the path where the queue will be saved. -- `ISSUE_INDEXER_QUEUE_CONN_STR`: **addrs=127.0.0.1:6379 db=0**: When `ISSUE_INDEXER_QUEUE_TYPE` is `redis`, this will store the redis connection string. When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this is a directory or additional options of the form `leveldb://path/to/db?option=value&....`, and overrides `ISSUE_INDEXER_QUEUE_DIR`. -- `ISSUE_INDEXER_QUEUE_BATCH_NUMBER`: **20**: Batch queue number. +- `ISSUE_INDEXER_QUEUE_TYPE`: **levelqueue**: Issue indexer queue, currently supports:`channel`, `levelqueue`, `redis`. **DEPRECATED** use settings in `[queue.issue_indexer]`. +- `ISSUE_INDEXER_QUEUE_DIR`: **queues/common**: When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this will be the path where the queue will be saved. **DEPRECATED** use settings in `[queue.issue_indexer]`. +- `ISSUE_INDEXER_QUEUE_CONN_STR`: **addrs=127.0.0.1:6379 db=0**: When `ISSUE_INDEXER_QUEUE_TYPE` is `redis`, this will store the redis connection string. When `ISSUE_INDEXER_QUEUE_TYPE` is `levelqueue`, this is a directory or additional options of the form `leveldb://path/to/db?option=value&....`, and overrides `ISSUE_INDEXER_QUEUE_DIR`. **DEPRECATED** use settings in `[queue.issue_indexer]`. +- `ISSUE_INDEXER_QUEUE_BATCH_NUMBER`: **20**: Batch queue number. **DEPRECATED** use settings in `[queue.issue_indexer]`. - `REPO_INDEXER_ENABLED`: **false**: Enables code search (uses a lot of disk space, about 6 times more than the repository size). - `REPO_INDEXER_TYPE`: **bleve**: Code search engine type, could be `bleve` or `elasticsearch`. @@ -354,29 +374,29 @@ relation to port exhaustion. - `REPO_INDEXER_INCLUDE`: **empty**: A comma separated list of glob patterns (see https://github.com/gobwas/glob) to **include** in the index. Use `**.txt` to match any files with .txt extension. An empty list means include all files. - `REPO_INDEXER_EXCLUDE`: **empty**: A comma separated list of glob patterns (see https://github.com/gobwas/glob) to **exclude** from the index. Files that match this list will not be indexed, even if they match in `REPO_INDEXER_INCLUDE`. - `REPO_INDEXER_EXCLUDE_VENDORED`: **true**: Exclude vendored files from index. -- `UPDATE_BUFFER_LEN`: **20**: Buffer length of index request. +- `UPDATE_BUFFER_LEN`: **20**: Buffer length of index request. **DEPRECATED** use settings in `[queue.issue_indexer]`. - `MAX_FILE_SIZE`: **1048576**: Maximum size in bytes of files to be indexed. 
- `STARTUP_TIMEOUT`: **30s**: If the indexer takes longer than this timeout to start - fail. (This timeout will be added to the hammer time above for child processes - as bleve will not start until the previous parent is shutdown.) Set to zero to never timeout. ## Queue (`queue` and `queue.*`) - `TYPE`: **persistable-channel**: General queue type, currently support: `persistable-channel` (uses a LevelDB internally), `channel`, `level`, `redis`, `dummy` -- `DATADIR`: **queues/**: Base DataDir for storing persistent and level queues. `DATADIR` for individual queues can be set in `queue.name` sections but will default to `DATADIR/`**`name`**. +- `DATADIR`: **queues/**: Base DataDir for storing persistent and level queues. `DATADIR` for individual queues can be set in `queue.name` sections but will default to `DATADIR/`**`common`**. (Previously each queue would default to `DATADIR/`**`name`**.) - `LENGTH`: **20**: Maximal queue size before channel queues block - `BATCH_LENGTH`: **20**: Batch data before passing to the handler - `CONN_STR`: **redis://127.0.0.1:6379/0**: Connection string for the redis queue type. Options can be set using query params. Similarly LevelDB options can also be set using: **leveldb://relative/path?option=value** or **leveldb:///absolute/path?option=value**, and will override `DATADIR` -- `QUEUE_NAME`: **_queue**: The suffix for default redis and disk queue name. Individual queues will default to **`name`**`QUEUE_NAME` but can be overriden in the specific `queue.name` section. +- `QUEUE_NAME`: **_queue**: The suffix for default redis and disk queue name. Individual queues will default to **`name`**`QUEUE_NAME` but can be overridden in the specific `queue.name` section. - `SET_NAME`: **_unique**: The suffix that will be added to the default redis and disk queue `set` name for unique queues. Individual queues will default to **`name`**`QUEUE_NAME`_`SET_NAME`_ but can be overridden in the specific `queue.name` section. - `WRAP_IF_NECESSARY`: **true**: Will wrap queues with a timeoutable queue if the selected queue is not ready to be created - (Only relevant for the level queue.) - `MAX_ATTEMPTS`: **10**: Maximum number of attempts to create the wrapped queue - `TIMEOUT`: **GRACEFUL_HAMMER_TIME + 30s**: Timeout the creation of the wrapped queue if it takes longer than this to create. - Queues by default come with a dynamically scaling worker pool. The following settings configure this: -- `WORKERS`: **1**: Number of initial workers for the queue. +- `WORKERS`: **0** (v1.14 and before: **1**): Number of initial workers for the queue. - `MAX_WORKERS`: **10**: Maximum number of worker go-routines for the queue. - `BLOCK_TIMEOUT`: **1s**: If the queue blocks for this time, boost the number of workers - the `BLOCK_TIMEOUT` will then be doubled before boosting again whilst the boost is ongoing. - `BOOST_TIMEOUT`: **5m**: Boost workers will timeout after this long. -- `BOOST_WORKERS`: **5**: This many workers will be added to the worker pool if there is a boost. +- `BOOST_WORKERS`: **1** (v1.14 and before: **5**): This many workers will be added to the worker pool if there is a boost. ## Admin (`admin`) @@ -440,7 +460,7 @@ relation to port exhaustion. - nickname - use the nickname attribute - email - use the username part of the email attribute - `UPDATE_AVATAR`: **false**: Update avatar if available from oauth2 provider. Update will be performed on each login. 
-- `ACCOUNT_LINKING`: **disabled**: How to handle if an account / email already exists: +- `ACCOUNT_LINKING`: **login**: How to handle if an account / email already exists: - disabled - show an error - login - show an account linking login - auto - automatically link with the account (Please be aware that this will grant access to an existing account just because the same username or email is provided. You must make sure that this does not cause issues with your authentication providers.) @@ -495,12 +515,16 @@ relation to port exhaustion. - `SHOW_MILESTONES_DASHBOARD_PAGE`: **true** Enable this to show the milestones dashboard page - a view of all the user's milestones - `AUTO_WATCH_NEW_REPOS`: **true**: Enable this to let all organisation users watch new repos when they are created - `AUTO_WATCH_ON_CHANGES`: **false**: Enable this to make users watch a repository after their first commit to it +- `DEFAULT_USER_VISIBILITY`: **public**: Set default visibility mode for users, either "public", "limited" or "private". +- `ALLOWED_USER_VISIBILITY_MODES`: **public,limited,private**: Set which visibility modes a user can have - `DEFAULT_ORG_VISIBILITY`: **public**: Set default visibility mode for organisations, either "public", "limited" or "private". - `DEFAULT_ORG_MEMBER_VISIBLE`: **false** True will make the membership of the users visible when added to the organisation. +- `ALLOW_ONLY_INTERNAL_REGISTRATION`: **false** Set to true to force registration only via gitea. - `ALLOW_ONLY_EXTERNAL_REGISTRATION`: **false** Set to true to force registration only using third-party services. - `NO_REPLY_ADDRESS`: **noreply.DOMAIN** Value for the domain part of the user's email address in the git log if user has set KeepEmailPrivate to true. DOMAIN resolves to the value in server.DOMAIN. The user's email will be replaced with a concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS. - `USER_DELETE_WITH_COMMENTS_MAX_TIME`: **0** Minimum amount of time a user must exist before comments are kept when the user is deleted. +- `VALID_SITE_URL_SCHEMES`: **http, https**: Valid site url schemes for user profiles ### Service - Expore (`service.explore`) @@ -532,9 +556,9 @@ Define allowed algorithms and their minimum key length (use -1 to disable a type - `DISABLE_HELO`: **\**: Disable HELO operation. - `HELO_HOSTNAME`: **\**: Custom hostname for HELO operation. - `HOST`: **\**: SMTP mail host address and port (example: smtp.gitea.io:587). - - Using opportunistic TLS via STARTTLS on port 587 is recommended per RFC 6409. + - As per RFC 8314, if supported, Implicit TLS/SMTPS on port 465 is recommended, otherwise opportunistic TLS via STARTTLS on port 587 should be used. - `IS_TLS_ENABLED` : **false** : Forcibly use TLS to connect even if not on a default SMTPS port. - - Note, if the port ends with `465` SMTPS/SMTP over TLS will be used despite this setting. + - Note, if the port ends with `465` Implicit TLS/SMTPS/SMTP over TLS will be used despite this setting. - Otherwise if `IS_TLS_ENABLED=false` and the server supports `STARTTLS` this will be used. Thus if `STARTTLS` is preferred you should set `IS_TLS_ENABLED=false`. - `FROM`: **\**: Mail from address, RFC 5322. This can be just an email address, or the "Name" \ format. @@ -566,11 +590,12 @@ Define allowed algorithms and their minimum key length (use -1 to disable a type ## Cache (`cache`) - `ENABLED`: **true**: Enable the cache. -- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, or `memcache`. 
-- `INTERVAL`: **60**: Garbage Collection interval (sec), for memory cache only. -- `HOST`: **\**: Connection string for `redis` and `memcache`. +- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.) +- `INTERVAL`: **60**: Garbage Collection interval (sec), for memory and twoqueue cache only. +- `HOST`: **\**: Connection string for `redis` and `memcache`. For `twoqueue` it sets the configuration for the cache. - Redis: `redis://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` - Memcache: `127.0.0.1:9090;127.0.0.1:9091` + - TwoQueue LRU cache: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` representing the maximum number of objects stored in the cache. - `ITEM_TTL`: **16h**: Time to keep items in cache if not used, Setting it to 0 disables caching. ## Cache - LastCommitCache settings (`cache.last_commit`) @@ -647,6 +672,7 @@ Default templates for project boards: - `ROUTER`: **console**: The mode or name of the log the router should log to. (If you set this to `,` it will log to default gitea logger.) NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take effect. Configure each mode in per mode log subsections `\[log.modename.router\]`. - `ENABLE_ACCESS_LOG`: **false**: Creates an access.log in NCSA common log format, or as per the following template +- `ENABLE_SSH_LOG`: **false**: Save the SSH log to a log file - `ACCESS`: **file**: Logging mode for the access logger, use a comma to separate values. Configure each mode in per mode log subsections `\[log.modename.access\]`. By default the file mode will log to `$ROOT_PATH/access.log`. (If you set this to `,` it will log to the default gitea logger.) - `ACCESS_LOG_TEMPLATE`: **`{{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"`**: Sets the template used to create the access log. - The following variables are available: @@ -703,6 +729,11 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef - `RUN_AT_START`: **false**: Run cron tasks at application start-up. - `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `SCHEDULE` accepts the following formats + - Full crontab specs, e.g. `* * * * * ?` + - Descriptors, e.g. `@midnight`, `@every 1h30m` ... + - See more: [cron documentation](https://pkg.go.dev/github.com/gogs/cron@v0.0.0-20171120032916-9f6c956d3e14) + ### Basic cron tasks - enabled by default #### Cron - Cleanup old repository archives (`cron.archive_cleanup`) @@ -808,7 +839,7 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef - `PULL_REQUEST_PUSH_MESSAGE`: **true**: Respond to pushes to a non-default branch with a URL for creating a Pull Request (if the repository has them enabled) - `VERBOSE_PUSH`: **true**: Print status information about pushes as they are being processed. - `VERBOSE_PUSH_DELAY`: **5s**: Only print verbose information if push takes longer than this delay. - +- `LARGE_OBJECT_THRESHOLD`: **1048576**: (Go-Git only), don't cache objects greater than this in memory. (Set to 0 to disable.) ## Git - Timeout settings (`git.timeout`) - `DEFAUlT`: **360**: Git operations default timeout seconds. - `MIGRATE`: **600**: Migrate external repositories timeout seconds.
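The cache and cron settings documented above can be combined in a single `app.ini` fragment. The sketch below is illustrative only: the `[cache]` keys and the `[cron.update_mirrors]` section name come from this cheat sheet, but the values (two-queue size, TTL, mirror schedule) are arbitrary examples rather than recommendations.

```ini
; illustrative app.ini fragment -- values are examples, not recommendations
[cache]
ENABLED = true
ADAPTER = twoqueue
; for the twoqueue adapter, HOST carries the cache configuration,
; either as a JSON object or as a plain object count such as 50000
HOST = {"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}
ITEM_TTL = 16h

[cron.update_mirrors]
; SCHEDULE accepts full crontab specs (e.g. "* * * * * ?") or descriptors such as @midnight
SCHEDULE = @every 1h30m
```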
@@ -836,7 +867,9 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef - `ACCESS_TOKEN_EXPIRATION_TIME`: **3600**: Lifetime of an OAuth2 access token in seconds - `REFRESH_TOKEN_EXPIRATION_TIME`: **730**: Lifetime of an OAuth2 refresh token in hours - `INVALIDATE_REFRESH_TOKENS`: **false**: Check if refresh token has already been used -- `JWT_SECRET`: **\**: OAuth2 authentication secret for access and refresh tokens, change this a unique string. +- `JWT_SIGNING_ALGORITHM`: **RS256**: Algorithm used to sign OAuth2 tokens. Valid values: \[`HS256`, `HS384`, `HS512`, `RS256`, `RS384`, `RS512`, `ES256`, `ES384`, `ES512`\] +- `JWT_SECRET`: **\**: OAuth2 authentication secret for access and refresh tokens, change this to a unique string. This setting is only needed if `JWT_SIGNING_ALGORITHM` is set to `HS256`, `HS384` or `HS512`. +- `JWT_SIGNING_PRIVATE_KEY_FILE`: **jwt/private.pem**: Private key file path used to sign OAuth2 tokens. The path is relative to `APP_DATA_PATH`. This setting is only needed if `JWT_SIGNING_ALGORITHM` is set to `RS256`, `RS384`, `RS512`, `ES256`, `ES384` or `ES512`. The file must contain a RSA or ECDSA private key in the PKCS8 format. If no key exists a 4096 bit key will be created for you. - `MAX_TOKEN_LENGTH`: **32767**: Maximum length of token/cookie to accept from OAuth2 provider ## i18n (`i18n`) @@ -864,7 +897,7 @@ IS_INPUT_FILE = false - ENABLED: **false** Enable markup support; set to **true** to enable this renderer. - NEED\_POSTPROCESS: **true** set to **true** to replace links / sha1 and etc. - FILE\_EXTENSIONS: **\** List of file extensions that should be rendered by an external - command. Multiple extentions needs a comma as splitter. + command. Multiple extensions need to be separated by a comma. - RENDER\_COMMAND: External command to render all matching extensions. - IS\_INPUT\_FILE: **false** Input is not a standard input but a file param followed `RENDER_COMMAND`. @@ -882,17 +915,21 @@ Gitea supports customizing the sanitization policy for rendered HTML. The exampl ELEMENT = span ALLOW_ATTR = class REGEXP = ^\s*((math(\s+|$)|inline(\s+|$)|display(\s+|$)))+ +ALLOW_DATA_URI_IMAGES = true ``` - `ELEMENT`: The element this policy applies to. Must be non-empty. - `ALLOW_ATTR`: The attribute this policy allows. Must be non-empty. - `REGEXP`: A regex to match the contents of the attribute against. Must be present but may be empty for unconditional whitelisting of this attribute. + - `ALLOW_DATA_URI_IMAGES`: **false** Allow data uri images (``). Multiple sanitisation rules can be defined by adding unique subsections, e.g. `[markup.sanitizer.TeX-2]`. +To apply a sanitisation rule only to a specific external renderer, the rule must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`. +If the rule is defined above the renderer ini section, or the name does not match a renderer, it is applied to every renderer. ## Time (`time`) -- `FORMAT`: Time format to diplay on UI. i.e. RFC1123 or 2006-01-02 15:04:05 +- `FORMAT`: Time format to display on UI. i.e. RFC1123 or 2006-01-02 15:04:05 - `DEFAULT_UI_LOCATION`: Default location of time on the UI, so that we can display correct user's time on UI. i.e. Shanghai/Asia ## Task (`task`) @@ -966,6 +1003,23 @@ MINIO_USE_SSL = false And used by `[attachment]`, `[lfs]` and etc. as `STORAGE_TYPE`. +## Repository Archive Storage (`storage.repo-archive`) + +Configuration for repository archive storage. It will inherit from default `[storage]` or +`[storage.xxx]` when `STORAGE_TYPE` is set to `xxx`.
The default of `PATH` +is `data/repo-archive` and the default of `MINIO_BASE_PATH` is `repo-archive/`. + +- `STORAGE_TYPE`: **local**: Storage type for repo archive, `local` for local disk or `minio` for s3 compatible object storage service or other name defined with `[storage.xxx]` +- `SERVE_DIRECT`: **false**: Allows the storage driver to redirect to authenticated URLs to serve files directly. Currently, only Minio/S3 is supported via signed URLs, local does nothing. +- `PATH`: **./data/repo-archive**: Where to store archive files, only available when `STORAGE_TYPE` is `local`. +- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint to connect only available when `STORAGE_TYPE` is `minio` +- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID to connect only available when `STORAGE_TYPE` is `minio` +- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey to connect only available when `STORAGE_TYPE is` `minio` +- `MINIO_BUCKET`: **gitea**: Minio bucket to store the lfs only available when `STORAGE_TYPE` is `minio` +- `MINIO_LOCATION`: **us-east-1**: Minio location to create bucket only available when `STORAGE_TYPE` is `minio` +- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path on the bucket only available when `STORAGE_TYPE` is `minio` +- `MINIO_USE_SSL`: **false**: Minio enabled ssl only available when `STORAGE_TYPE` is `minio` + ## Other (`other`) - `SHOW_FOOTER_BRANDING`: **false**: Show Gitea branding in the footer. diff --git a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md index 1f7ae10c5daa..2303a631d56c 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md +++ b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md @@ -245,6 +245,11 @@ test01.xls: application/vnd.ms-excel; charset=binary - `ENABLED`: 是否在后台运行定期任务。 - `RUN_AT_START`: 是否启动时自动运行。 +- `SCHEDULE` 所接受的格式 + - 完整 crontab 控制, 例如 `* * * * * ?` + - 描述符, 例如 `@midnight`, `@every 1h30m` ... 
+ - 更多细节参见 [cron api文档](https://pkg.go.dev/github.com/gogs/cron@v0.0.0-20171120032916-9f6c956d3e14) + ### Cron - Update Mirrors (`cron.update_mirrors`) @@ -377,6 +382,21 @@ MINIO_USE_SSL = false 然后你在 `[attachment]`, `[lfs]` 等中可以把这个名字用作 `STORAGE_TYPE` 的值。 +## Repository Archive Storage (`storage.repo-archive`) + +Repository archive 的存储配置。 如果 `STORAGE_TYPE` 为空,则此配置将从 `[storage]` 继承。如果不为 `local` 或者 `minio` 而为 `xxx`, 则从 `[storage.xxx]` 继承。当继承时, `PATH` 默认为 `data/repo-archive`,`MINIO_BASE_PATH` 默认为 `repo-archive/`。 + +- `STORAGE_TYPE`: **local**: Repository archive 的存储类型,`local` 将存储到磁盘,`minio` 将存储到 s3 兼容的对象服务。 +- `SERVE_DIRECT`: **false**: 允许直接重定向到存储系统。当前,仅 Minio/S3 是支持的。 +- `PATH`: 存放 Repository archive 上传的文件的地方,默认是 `data/repo-archive`。 +- `MINIO_ENDPOINT`: **localhost:9000**: Minio 地址,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_BUCKET`: **gitea**: Minio bucket,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_LOCATION`: **us-east-1**: Minio location ,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path ,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 +- `MINIO_USE_SSL`: **false**: Minio 是否启用 ssl ,仅当 `STORAGE_TYPE` 为 `minio` 时有效。 + ## Other (`other`) - `SHOW_FOOTER_BRANDING`: 为真则在页面底部显示Gitea的字样。 diff --git a/docs/content/doc/advanced/customizing-gitea.en-us.md b/docs/content/doc/advanced/customizing-gitea.en-us.md index a07b5f780130..cbb749412f03 100644 --- a/docs/content/doc/advanced/customizing-gitea.en-us.md +++ b/docs/content/doc/advanced/customizing-gitea.en-us.md @@ -35,7 +35,7 @@ Again `gitea help` will allow you review this variable and you can override it u `--config` option on the `gitea` binary. - [Quick Cheat Sheet](https://docs.gitea.io/en-us/config-cheat-sheet/) -- [Complete List](https://github.com/go-gitea/gitea/blob/master/custom/conf/app.example.ini) +- [Complete List](https://github.com/go-gitea/gitea/blob/main/custom/conf/app.example.ini) If the `CustomPath` folder can't be found despite checking `gitea help`, check the `GITEA_CUSTOM` environment variable; this can be used to override the default path to something else. @@ -56,24 +56,20 @@ To make Gitea serve custom public files (like pages and images), use the folder `$GITEA_CUSTOM/public/` as the webroot. Symbolic links will be followed. For example, a file `image.png` stored in `$GITEA_CUSTOM/public/`, can be accessed with -the url `http://gitea.domain.tld/image.png`. +the url `http://gitea.domain.tld/assets/image.png`. -## Changing the default logo +## Changing the logo -To build a custom logo replace `assets/logo.svg` and run `make generate-images`. This will update -these customizable logo files which you can then place in `$GITEA_CUSTOM/public/img` on your server: +To build a custom logo clone the Gitea source repository, replace `assets/logo.svg` and run +`make generate-images`. 
This will update below output files which you can then place in `$GITEA_CUSTOM/public/img` on your server: -- `public/img/logo.svg` -- `public/img/logo.png` -- `public/img/favicon.png` -- `public/img/avatar_default.png` -- `public/img/apple-touch-icon.png` +- `public/img/logo.svg` - Used for favicon, site icon, app icon +- `public/img/logo.png` - Used for Open Graph +- `public/img/favicon.png` - Used as fallback for browsers that don't support SVG favicons +- `public/img/avatar_default.png` - Used as the default avatar image +- `public/img/apple-touch-icon.png` - Used on iOS devices for bookmarks -## Changing the default avatar - -Either generate it via above method or place the png image at the following path: - -- `$GITEA_CUSTOM/public/img/avatar_default.png` +In case the source image is not in vector format, you can attempt to convert a raster image using tools like [this](https://www.aconvert.com/image/png-to-svg/). ## Customizing Gitea pages and resources @@ -87,14 +83,14 @@ directory at the top of this document). Every single page of Gitea can be changed. Dynamic content is generated using [go templates](https://golang.org/pkg/html/template/), which can be modified by placing replacements below the `$GITEA_CUSTOM/templates` directory. -To obtain any embedded file (including templates), the [`gitea embedded` tool]({{< relref "doc/advanced/cmd-embedded.en-us.md" >}}) can be used. Alternatively, they can be found in the [`templates`](https://github.com/go-gitea/gitea/tree/master/templates) directory of Gitea source (Note: the example link is from the `master` branch. Make sure to use templates compatible with the release you are using). +To obtain any embedded file (including templates), the [`gitea embedded` tool]({{< relref "doc/advanced/cmd-embedded.en-us.md" >}}) can be used. Alternatively, they can be found in the [`templates`](https://github.com/go-gitea/gitea/tree/main/templates) directory of Gitea source (Note: the example link is from the `main` branch. Make sure to use templates compatible with the release you are using). Be aware that any statement contained inside `{{` and `}}` are Gitea's template syntax and shouldn't be touched without fully understanding these components. ### Customizing startpage / homepage -Copy [`home.tmpl`](https://github.com/go-gitea/gitea/blob/master/templates/home.tmpl) for your version of Gitea from `templates` to `$GITEA_CUSTOM/templates`. +Copy [`home.tmpl`](https://github.com/go-gitea/gitea/blob/main/templates/home.tmpl) for your version of Gitea from `templates` to `$GITEA_CUSTOM/templates`. Edit as you wish. Dont forget to restart your gitea to apply the changes. @@ -113,7 +109,7 @@ For more information, see [Adding Legal Pages](https://docs.gitea.io/en-us/addin You can add new tabs in the same way, putting them in `extra_tabs.tmpl`. The exact HTML needed to match the style of other tabs is in the file `templates/repo/header.tmpl` -([source in GitHub](https://github.com/go-gitea/gitea/blob/master/templates/repo/header.tmpl)) +([source in GitHub](https://github.com/go-gitea/gitea/blob/main/templates/repo/header.tmpl)) ### Other additions to the page @@ -142,7 +138,7 @@ copy javascript files from https://gitea.com/davidsvantesson/plantuml-code-highl {{end}} ``` @@ -258,7 +254,7 @@ Then restart gitea and open a STL file on your gitea instance. The `$GITEA_CUSTOM/templates/mail` folder allows changing the body of every mail of Gitea. 
Templates to override can be found in the -[`templates/mail`](https://github.com/go-gitea/gitea/tree/master/templates/mail) +[`templates/mail`](https://github.com/go-gitea/gitea/tree/main/templates/mail) directory of Gitea source. Override by making a copy of the file under `$GITEA_CUSTOM/templates/mail` using a full path structure matching source. @@ -282,7 +278,7 @@ To add custom .gitignore, add a file with existing [.gitignore rules](https://gi ### Labels -To add a custom label set, add a file that follows the [label format](https://github.com/go-gitea/gitea/blob/master/options/label/Default) to `$GITEA_CUSTOM/options/label` +To add a custom label set, add a file that follows the [label format](https://github.com/go-gitea/gitea/blob/main/options/label/Default) to `$GITEA_CUSTOM/options/label` `#hex-color label name ; label description` ### Licenses @@ -293,7 +289,7 @@ To add a custom license, add a file with the license text to `$GITEA_CUSTOM/opti Locales are managed via our [crowdin](https://crowdin.com/project/gitea). You can override a locale by placing an altered locale file in `$GITEA_CUSTOM/options/locale`. -Gitea's default locale files can be found in the [`options/locale`](https://github.com/go-gitea/gitea/tree/master/options/locale) source folder and these should be used as examples for your changes. +Gitea's default locale files can be found in the [`options/locale`](https://github.com/go-gitea/gitea/tree/main/options/locale) source folder and these should be used as examples for your changes. To add a completely new locale, as well as placing the file in the above location, you will need to add the new lang and name to the `[i18n]` section in your `app.ini`. Keep in mind that Gitea will use those settings as **overrides**, so if you want to keep the other languages as well you will need to copy/paste the default values and add your own to them. diff --git a/docs/content/doc/advanced/customizing-gitea.zh-cn.md b/docs/content/doc/advanced/customizing-gitea.zh-cn.md index 91762b13d96b..4640f878ad0b 100644 --- a/docs/content/doc/advanced/customizing-gitea.zh-cn.md +++ b/docs/content/doc/advanced/customizing-gitea.zh-cn.md @@ -40,7 +40,7 @@ Gitea 引用 `custom` 目录中的自定义配置文件来覆盖配置、模板 将自定义的公共文件(比如页面和图片)作为 webroot 放在 `custom/public/` 中来让 Gitea 提供这些自定义内容(符号链接将被追踪)。 -举例说明:`image.png` 存放在 `custom/public/`中,那么它可以通过链接 http://gitea.domain.tld/image.png 访问。 +举例说明:`image.png` 存放在 `custom/public/`中,那么它可以通过链接 http://gitea.domain.tld/assets/image.png 访问。 ## 修改默认头像 diff --git a/docs/content/doc/advanced/external-renderers.en-us.md b/docs/content/doc/advanced/external-renderers.en-us.md index 6b283ca2e134..c0109b801409 100644 --- a/docs/content/doc/advanced/external-renderers.en-us.md +++ b/docs/content/doc/advanced/external-renderers.en-us.md @@ -64,8 +64,8 @@ IS_INPUT_FILE = false [markup.jupyter] ENABLED = true FILE_EXTENSIONS = .ipynb -RENDER_COMMAND = "jupyter nbconvert --stdout --to html --template basic " -IS_INPUT_FILE = true +RENDER_COMMAND = "jupyter nbconvert --stdin --stdout --to html --template basic" +IS_INPUT_FILE = false [markup.restructuredtext] ENABLED = true @@ -90,11 +90,79 @@ FILE_EXTENSIONS = .md,.markdown RENDER_COMMAND = pandoc -f markdown -t html --katex ``` -You must define `ELEMENT`, `ALLOW_ATTR`, and `REGEXP` in each section. +You must define `ELEMENT` and `ALLOW_ATTR` in each section. To define multiple entries, add a unique alphanumeric suffix (e.g., `[markup.sanitizer.1]` and `[markup.sanitizer.something]`). 
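As a concrete illustration of the multiple-entries form, a hypothetical pair of rules might look like the sketch below; the keys are the documented `[markup.sanitizer.*]` keys, while the element, attribute and pattern choices are placeholders rather than a recommended policy.

```ini
; two independent sanitizer rules, distinguished only by their unique suffix
[markup.sanitizer.1]
ELEMENT = span
ALLOW_ATTR = class
REGEXP = ^(info|warning|error)$

[markup.sanitizer.something]
ELEMENT = div
ALLOW_ATTR = style
; an empty REGEXP whitelists the attribute unconditionally
REGEXP =
```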
+To apply a sanitisation rule only to a specific external renderer, the rule must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`, `[markup.sanitizer..rule-1]`. + +**Note**: If the rule is defined above the renderer ini section, or the name does not match a renderer, it is applied to every renderer. + Once your configuration changes have been made, restart Gitea to have changes take effect. **Note**: Prior to Gitea 1.12 there was a single `markup.sanitiser` section with keys that were redefined for multiple rules, however, there were significant problems with this method of configuration necessitating configuration through multiple sections. + +### Example: Office DOCX + +Display Office DOCX files with [`pandoc`](https://pandoc.org/): +```ini +[markup.docx] +ENABLED = true +FILE_EXTENSIONS = .docx +RENDER_COMMAND = "pandoc --from docx --to html --self-contained --template /path/to/basic.html" + +[markup.sanitizer.docx.img] +ALLOW_DATA_URI_IMAGES = true +``` + +The template file has the following content: +``` +$body$ +``` + +### Example: Jupyter Notebook + +Display Jupyter Notebook files with [`nbconvert`](https://github.com/jupyter/nbconvert): +```ini +[markup.jupyter] +ENABLED = true +FILE_EXTENSIONS = .ipynb +RENDER_COMMAND = "jupyter-nbconvert --stdin --stdout --to html --template basic" + +[markup.sanitizer.jupyter.img] +ALLOW_DATA_URI_IMAGES = true +``` + +## Customizing CSS +The external renderer is specified in the .ini in the format `[markup.XXXXX]` and the HTML supplied by your external renderer will be wrapped in a `
<div>
` with classes `markup` and `XXXXX`. The `markup` class provides out of the box styling (as does `markdown` if `XXXXX` is `markdown`). Otherwise you can use these classes to specifically target the contents of your rendered HTML. + +And so you could write some CSS: +```css +.markup.XXXXX html { + font-size: 100%; + overflow-y: scroll; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; +} + +.markup.XXXXX body { + color: #444; + font-family: Georgia, Palatino, 'Palatino Linotype', Times, 'Times New Roman', serif; + font-size: 12px; + line-height: 1.7; + padding: 1em; + margin: auto; + max-width: 42em; + background: #fefefe; +} + +.markup.XXXXX p { + color: orangered; +} +``` + +Add your stylesheet to your custom directory e.g `custom/public/css/my-style-XXXXX.css` and import it using a custom header file `custom/templates/custom/header.tmpl`: +```html + +``` diff --git a/docs/content/doc/advanced/logging-documentation.en-us.md b/docs/content/doc/advanced/logging-documentation.en-us.md index 73a5e0eae15d..478b6418c6cb 100644 --- a/docs/content/doc/advanced/logging-documentation.en-us.md +++ b/docs/content/doc/advanced/logging-documentation.en-us.md @@ -282,7 +282,7 @@ ROUTER = console COLORIZE = false ; this can be true if you can strip out the ansi coloring ``` -Sometimes it will be helpful get some specific `TRACE` level logging retricted +Sometimes it will be helpful get some specific `TRACE` level logging restricted to messages that match a specific `EXPRESSION`. Adjusting the `MODE` in the `[log]` section to `MODE = console,traceconsole` to add a new logger output `traceconsole` and then adding its corresponding section would be helpful: @@ -437,7 +437,8 @@ Gitea includes built-in log rotation, which should be enough for most deployment - Disable built-in log rotation by setting `LOG_ROTATE` to `false` in your `app.ini`. - Install `logrotate`. -- Configure `logrotate` to match your deployment requirements, see `man 8 logrotate` for configuration syntax details. In the `postrotate/endscript` block send Gitea a `USR1` signal via `kill -USR1` or `kill -10`, or run `gitea manager logging release-and-reopen` (with the appropriate environment). Ensure that your configurations apply to all files emitted by Gitea loggers as described in the above sections. -- Always do `logrotate /etc/logrotate.conf --debug` to test your configurations. +- Configure `logrotate` to match your deployment requirements, see `man 8 logrotate` for configuration syntax details. In the `postrotate/endscript` block send Gitea a `USR1` signal via `kill -USR1` or `kill -10` to the `gitea` process itself, or run `gitea manager logging release-and-reopen` (with the appropriate environment). Ensure that your configurations apply to all files emitted by Gitea loggers as described in the above sections. +- Always do `logrotate /etc/logrotate.conf --debug` to test your configurations. +- If you are using docker and are running from outside of the container you can use `docker exec -u $OS_USER $CONTAINER_NAME sh -c 'gitea manager logging release-and-reopen'` or `docker exec $CONTAINER_NAME sh -c '/bin/s6-svc -1 /etc/s6/gitea/'` or send `USR1` directly to the gitea process itself. The next `logrotate` jobs will include your configurations, so no restart is needed. You can also immediately reload `logrotate` with `logrotate /etc/logrotate.conf --force`. 
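Returning to the `EXPRESSION` filtering mentioned above: the `traceconsole` section itself falls outside this hunk, so the following is only a sketch of what such a sub-logger might look like, assuming the usual per-logger keys (`MODE`, `LEVEL`, `EXPRESSION`); the expression value is a placeholder.

```ini
; assumed sketch of the trace-only sub-logger enabled via MODE = console,traceconsole
[log.traceconsole]
MODE = console
LEVEL = trace
; only messages matching this (placeholder) expression are logged by this logger
EXPRESSION = git\.Command
```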
diff --git a/docs/content/doc/advanced/protected-tags.en-us.md b/docs/content/doc/advanced/protected-tags.en-us.md new file mode 100644 index 000000000000..4d109c227833 --- /dev/null +++ b/docs/content/doc/advanced/protected-tags.en-us.md @@ -0,0 +1,57 @@ +--- +date: "2021-05-14T00:00:00-00:00" +title: "Protected tags" +slug: "protected-tags" +weight: 45 +toc: false +draft: false +menu: + sidebar: + parent: "advanced" + name: "Protected tags" + weight: 45 + identifier: "protected-tags" +--- + +# Protected tags + +Protected tags allow control over who has permission to create or update git tags. Each rule allows you to match either an individual tag name, or use an appropriate pattern to control multiple tags at once. + +**Table of Contents** + +{{< toc >}} + +## Setting up protected tags + +To protect a tag, you need to follow these steps: + +1. Go to the repository’s **Settings** > **Tags** page. +1. Type a pattern to match a name. You can use a single name, a [glob pattern](https://pkg.go.dev/github.com/gobwas/glob#Compile) or a regular expression. +1. Choose the allowed users and/or teams. If you leave these fields empty no one is allowed to create or modify this tag. +1. Select **Save** to save the configuration. + +## Pattern protected tags + +The pattern uses [glob](https://pkg.go.dev/github.com/gobwas/glob#Compile) or regular expressions to match a tag name. For regular expressions you need to enclose the pattern in slashes. + +Examples: + +| Type | Pattern Protected Tag | Possible Matching Tags | +| ----- | ------------------------ | --------------------------------------- | +| Glob | `v*` | `v`, `v-1`, `version2` | +| Glob | `v[0-9]` | `v0`, `v1` up to `v9` | +| Glob | `*-release` | `2.1-release`, `final-release` | +| Glob | `gitea` | only `gitea` | +| Glob | `*gitea*` | `gitea`, `2.1-gitea`, `1_gitea-release` | +| Glob | `{v,rel}-*` | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` | +| Glob | `*` | matches all possible tag names | +| Regex | `/\Av/` | `v`, `v-1`, `version2` | +| Regex | `/\Av[0-9]\z/` | `v0`, `v1` up to `v9` | +| Regex | `/\Av\d+\.\d+\.\d+\z/` | `v1.0.17`, `v2.1.0` | +| Regex | `/\Av\d+(\.\d+){0,2}\z/` | `v1`, `v2.1`, `v1.2.34` | +| Regex | `/-release\z/` | `2.1-release`, `final-release` | +| Regex | `/gitea/` | `gitea`, `2.1-gitea`, `1_gitea-release` | +| Regex | `/\Agitea\z/` | only `gitea` | +| Regex | `/^gitea$/` | only `gitea` | +| Regex | `/\A(v\|rel)-/` | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` | +| Regex | `/.+/` | matches all possible tag names | diff --git a/docs/content/doc/advanced/repo-mirror.en-us.md b/docs/content/doc/advanced/repo-mirror.en-us.md new file mode 100644 index 000000000000..bda5b0fa5594 --- /dev/null +++ b/docs/content/doc/advanced/repo-mirror.en-us.md @@ -0,0 +1,88 @@ +--- +date: "2021-05-13T00:00:00-00:00" +title: "Repository Mirror" +slug: "repo-mirror" +weight: 45 +toc: false +draft: false +menu: + sidebar: + parent: "advanced" + name: "Repository Mirror" + weight: 45 + identifier: "repo-mirror" +--- + +# Repository Mirror + +Repository mirroring allows for the mirroring of repositories to and from external sources. You can use it to mirror branches, tags, and commits between repositories. + +**Table of Contents** + +{{< toc >}} + +## Use cases + +The following are some possible use cases for repository mirroring: + +- You migrated to Gitea but still need to keep your project in another source. 
In that case, you can simply set it up to mirror to Gitea (pull) and all the essential history of commits, tags, and branches are available in your Gitea instance. +- You have old projects in another source that you don’t use actively anymore, but don’t want to remove for archiving purposes. In that case, you can create a push mirror so that your active Gitea repository can push its changes to the old location. + +## Pulling from a remote repository + +For an existing remote repository, you can set up pull mirroring as follows: + +1. Select **New Migration** in the **Create...** menu on the top right. +2. Select the remote repository service. +3. Enter a repository URL. +4. If the repository needs authentication, fill in your authentication information. +5. Check the box **This repository will be a mirror**. +6. Select **Migrate repository** to save the configuration. + +The repository now gets mirrored periodically from the remote repository. You can force a sync by selecting **Synchronize Now** in the repository settings. + +## Pushing to a remote repository + +For an existing repository, you can set up push mirroring as follows: + +1. In your repository, go to **Settings** > **Repository**, and then the **Mirror Settings** section. +2. Enter a repository URL. +3. If the repository needs authentication, expand the **Authorization** section and fill in your authentication information. +4. Select **Add Push Mirror** to save the configuration. + +The repository now gets mirrored periodically to the remote repository. You can force a sync by selecting **Synchronize Now**. In case of an error, a message is displayed to help you resolve it. + +:exclamation::exclamation: **NOTE:** This will force push to the remote repository. This will overwrite any changes in the remote repository! :exclamation::exclamation: + +### Setting up a push mirror from Gitea to GitHub + +To set up a mirror from Gitea to GitHub, you need to follow these steps: + +1. Create a [GitHub personal access token](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) with the *public_repo* box checked. +2. Fill in the **Git Remote Repository URL**: `https://github.com//.git`. +3. Fill in the **Authorization** fields with your GitHub username and the personal access token. +4. Select **Add Push Mirror** to save the configuration. + +The repository pushes shortly thereafter. To force a push, select the **Synchronize Now** button. + +### Setting up a push mirror from Gitea to GitLab + +To set up a mirror from Gitea to GitLab, you need to follow these steps: + +1. Create a [GitLab personal access token](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) with *write_repository* scope. +2. Fill in the **Git Remote Repository URL**: `https:////.git`. +3. Fill in the **Authorization** fields with `oauth2` as **Username** and your GitLab personal access token as **Password**. +4. Select **Add Push Mirror** to save the configuration. + +The repository pushes shortly thereafter. To force a push, select the **Synchronize Now** button. + +### Setting up a push mirror from Gitea to Bitbucket + +To set up a mirror from Gitea to Bitbucket, you need to follow these steps: + +1. Create a [Bitbucket app password](https://support.atlassian.com/bitbucket-cloud/docs/app-passwords/) with the *Repository Write* box checked. +2. Fill in the **Git Remote Repository URL**: `https://bitbucket.org//.git`. +3.
Fill in the **Authorization** fields with your Bitbucket username and the app password as **Password**. +4. Select **Add Push Mirror** to save the configuration. + +The repository pushes shortly thereafter. To force a push, select the **Synchronize Now** button. diff --git a/docs/content/doc/developers/api-usage.en-us.md b/docs/content/doc/developers/api-usage.en-us.md index 15fedbe2c164..06cbc9b72e1e 100644 --- a/docs/content/doc/developers/api-usage.en-us.md +++ b/docs/content/doc/developers/api-usage.en-us.md @@ -40,8 +40,42 @@ better understand this by looking at the code -- as of this writing, Gitea parses queries and headers to find the token in [modules/auth/auth.go](https://github.com/go-gitea/gitea/blob/6efdcaed86565c91a3dc77631372a9cc45a58e89/modules/auth/auth.go#L47). -You can create an API key token via your Gitea installation's web interface: -`Settings | Applications | Generate New Token`. +## Generating and listing API tokens + +A new token can be generated with a `POST` request to +`/users/:name/tokens`. + +Note that `/users/:name/tokens` is a special endpoint and requires you +to authenticate using `BasicAuth` and a password, as follows: + + +```sh +$ curl -XPOST -H "Content-Type: application/json" -k -d '{"name":"test"}' -u username:password https://gitea.your.host/api/v1/users//tokens +{"id":1,"name":"test","sha1":"9fcb1158165773dd010fca5f0cf7174316c3e37d","token_last_eight":"16c3e37d"} +``` + +The ``sha1`` (the token) is only returned once and is not stored in +plain-text. It will not be displayed when listing tokens with a `GET` +request; e.g. + +```sh +$ curl --request GET --url https://yourusername:password@gitea.your.host/api/v1/users//tokens +[{"name":"test","sha1":"","token_last_eight":"........"},{"name":"dev","sha1":"","token_last_eight":"........"}] +``` + +To use the API with basic authentication with two factor authentication +enabled, you'll need to send an additional header that contains the one +time password (6 digit rotating token). +An example of the header is `X-Gitea-OTP: 123456` where `123456` +is where you'd place the code from your authenticator.
Here is how the request would look like in curl: - -```sh -$ curl -H "X-Gitea-OTP: 123456" --request GET --url https://yourusername:yourpassword@gitea.your.host/api/v1/users/yourusername/tokens -``` - ## Sudo The API allows admin users to sudo API requests as another user. Simply add either a `sudo=` parameter or `Sudo:` request header with the username of the user to sudo. diff --git a/docs/content/doc/developers/hacking-on-gitea.en-us.md b/docs/content/doc/developers/hacking-on-gitea.en-us.md index 516a33d2addc..96db0a01bb9e 100644 --- a/docs/content/doc/developers/hacking-on-gitea.en-us.md +++ b/docs/content/doc/developers/hacking-on-gitea.en-us.md @@ -73,6 +73,8 @@ One of these three distributions of Make will run on Windows: - The binary is called `mingw32-make.exe` instead of `make.exe`. Add the `bin` folder to `PATH`. - [Chocolatey package](https://chocolatey.org/packages/make). Run `choco install make` +**Note**: If you are attempting to build using make with Windows Command Prompt, you may run into issues. The above prompts (git bash, or mingw) are recommended, however if you only have command prompt (or potentially powershell) you can set environment variables using the [set](https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/set_1) command, e.g. `set TAGS=bindata`. + ## Downloading and cloning the Gitea source code The recommended method of obtaining the source code is by using `git clone`. @@ -86,7 +88,7 @@ from within the `$GOPATH`, hence the `go get` approach is no longer recommended. ## Forking Gitea -Download the master Gitea source code as above. Then, fork the +Download the main Gitea source code as above. Then, fork the [Gitea repository](https://github.com/go-gitea/gitea) on GitHub, and either switch the git remote origin for your fork or add your fork as another remote: @@ -123,11 +125,11 @@ TAGS="bindata sqlite sqlite_unlock_notify" make build The `build` target will execute both `frontend` and `backend` sub-targets. If the `bindata` tag is present, the frontend files will be compiled into the binary. It is recommended to leave out the tag when doing frontend development so that changes will be reflected. -See `make help` for all available `make` targets. Also see [`.drone.yml`](https://github.com/go-gitea/gitea/blob/master/.drone.yml) to see how our continuous integration works. +See `make help` for all available `make` targets. Also see [`.drone.yml`](https://github.com/go-gitea/gitea/blob/main/.drone.yml) to see how our continuous integration works. ## Building continuously -To run and continously rebuild when source files change: +To run and continuously rebuild when source files change: ```bash make watch @@ -216,7 +218,7 @@ You should validate your generated Swagger file and spell-check it with: make swagger-validate misspell-check ``` -You should commit the changed swagger JSON file. The continous integration +You should commit the changed swagger JSON file. The continuous integration server will check that this has been done using: ```bash @@ -276,7 +278,7 @@ require `git lfs` to be installed. Other database tests are available but may need adjustment to the local environment. Look at -[`integrations/README.md`](https://github.com/go-gitea/gitea/blob/master/integrations/README.md) +[`integrations/README.md`](https://github.com/go-gitea/gitea/blob/main/integrations/README.md) for more information and how to run a single test. 
Our continuous integration will test the code passes its unit tests and that @@ -304,19 +306,19 @@ be cleaned up. A `launch.json` and `tasks.json` are provided within `contrib/ide/vscode` for Visual Studio Code. Look at -[`contrib/ide/README.md`](https://github.com/go-gitea/gitea/blob/master/contrib/ide/README.md) +[`contrib/ide/README.md`](https://github.com/go-gitea/gitea/blob/main/contrib/ide/README.md) for more information. ## Submitting PRs Once you're happy with your changes, push them up and open a pull request. It is recommended that you allow Gitea Managers and Owners to modify your PR -branches as we will need to update it to master before merging and/or may be +branches as we will need to update it to main before merging and/or may be able to help fix issues directly. Any PR requires two approvals from the Gitea maintainers and needs to pass the -continous integration. Take a look at our -[`CONTRIBUTING.md`](https://github.com/go-gitea/gitea/blob/master/CONTRIBUTING.md) +continuous integration. Take a look at our +[`CONTRIBUTING.md`](https://github.com/go-gitea/gitea/blob/main/CONTRIBUTING.md) document. If you need more help pop on to [Discord](https://discord.gg/gitea) #Develop diff --git a/docs/content/doc/developers/integrations.en-us.md b/docs/content/doc/developers/integrations.en-us.md index a1d8f0f938ce..8a111a3a8dc6 100644 --- a/docs/content/doc/developers/integrations.en-us.md +++ b/docs/content/doc/developers/integrations.en-us.md @@ -20,7 +20,7 @@ projects. We are curating a list over at [awesome-gitea](https://gitea.com/gitea/awesome-gitea) to track these! -If you are looking for [CI/CD](https://gitea.com/gitea/awesome-gitea#devops), -an [SDK](https://gitea.com/gitea/awesome-gitea#sdk), -or even some extra [themes](https://gitea.com/gitea/awesome-gitea#themes), +If you are looking for [CI/CD](https://gitea.com/gitea/awesome-gitea#user-content-devops), +an [SDK](https://gitea.com/gitea/awesome-gitea#user-content-sdk), +or even some extra [themes](https://gitea.com/gitea/awesome-gitea#user-content-themes), you can find them listed in the [awesome-gitea](https://gitea.com/gitea/awesome-gitea) repository! diff --git a/docs/content/doc/developers/integrations.zh-tw.md b/docs/content/doc/developers/integrations.zh-tw.md index 6a3f3f8bf326..6991ec4ae886 100644 --- a/docs/content/doc/developers/integrations.zh-tw.md +++ b/docs/content/doc/developers/integrations.zh-tw.md @@ -19,4 +19,4 @@ Gitea 有著很棒的第三方整合社群, 以及其它有著一流支援的 我們持續的整理一份清單以追蹤他們!請到 [awesome-gitea](https://gitea.com/gitea/awesome-gitea) 查看。 -如果您正在找尋有關 [CI/CD](https://gitea.com/gitea/awesome-gitea#devops)、[SDK](https://gitea.com/gitea/awesome-gitea#sdk) 或是其它佈景主題,您可以在存儲庫 [awesome-gitea](https://gitea.com/gitea/awesome-gitea) 找到他們。 +如果您正在找尋有關 [CI/CD](https://gitea.com/gitea/awesome-gitea#user-content-devops)、[SDK](https://gitea.com/gitea/awesome-gitea#user-content-sdk) 或是其它佈景主題,您可以在存儲庫 [awesome-gitea](https://gitea.com/gitea/awesome-gitea) 找到他們。 diff --git a/docs/content/doc/developers/migrations.en-us.md b/docs/content/doc/developers/migrations.en-us.md index 6e1a7e0195b7..e432ea3b3ff0 100644 --- a/docs/content/doc/developers/migrations.en-us.md +++ b/docs/content/doc/developers/migrations.en-us.md @@ -20,7 +20,7 @@ repository data from other git host platforms to Gitea or, in the future, migrat git host platforms. Currently, migrations from Github, Gitlab, and other Gitea instances are implemented. 
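Before the internal interfaces are described, it may help to see what a migration looks like from the outside. The curl sketch below is illustrative only: the `POST /repos/migrate` endpoint and the `clone_addr`/`repo_name`/`mirror` field names are assumptions that should be verified against your instance's Swagger document (`/swagger.v1.json`, referenced in the API usage guide above), and `$GITEA_TOKEN` stands for an API token generated as shown there:

```sh
# Hypothetical example: verify the endpoint and field names against /swagger.v1.json
# before relying on them; GITEA_TOKEN is assumed to hold a valid API token.
curl -X POST "https://gitea.your.host/api/v1/repos/migrate" \
  -H "Authorization: token $GITEA_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"clone_addr": "https://github.com/go-gitea/gitea.git", "repo_name": "gitea-mirror", "mirror": true}'
```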
-First of all, Gitea defines some standard objects in packages [modules/migrations/base](https://github.com/go-gitea/gitea/tree/master/modules/migrations/base). +First of all, Gitea defines some standard objects in packages [modules/migrations/base](https://github.com/go-gitea/gitea/tree/main/modules/migrations/base). They are `Repository`, `Milestone`, `Release`, `ReleaseAsset`, `Label`, `Issue`, `Comment`, `PullRequest`, `Reaction`, `Review`, `ReviewComment`. ## Downloader Interfaces @@ -31,11 +31,11 @@ To migrate from a new git host platform, there are two steps to be updated. - You should implement a `DownloaderFactory` which will be used to detect if the URL matches and create the above `Downloader`. - You'll need to register the `DownloaderFactory` via `RegisterDownloaderFactory` on `init()`. -You can find these interfaces in [downloader.go](https://github.com/go-gitea/gitea/blob/master/modules/migrations/base/downloader.go). +You can find these interfaces in [downloader.go](https://github.com/go-gitea/gitea/blob/main/modules/migrations/base/downloader.go). ## Uploader Interface Currently, only a `GiteaLocalUploader` is implemented, so we only save downloaded data via this `Uploader` to the local Gitea instance. Other uploaders are not supported at this time. -You can find these interfaces in [uploader.go](https://github.com/go-gitea/gitea/blob/master/modules/migrations/base/uploader.go). +You can find these interfaces in [uploader.go](https://github.com/go-gitea/gitea/blob/main/modules/migrations/base/uploader.go). diff --git a/docs/content/doc/developers/oauth2-provider.md b/docs/content/doc/developers/oauth2-provider.md index 29305a24ca19..efe78eed9765 100644 --- a/docs/content/doc/developers/oauth2-provider.md +++ b/docs/content/doc/developers/oauth2-provider.md @@ -23,10 +23,13 @@ Gitea supports acting as an OAuth2 provider to allow third party applications to ## Endpoints -| Endpoint | URL | -| ---------------------- | --------------------------- | -| Authorization Endpoint | `/login/oauth/authorize` | -| Access Token Endpoint | `/login/oauth/access_token` | +| Endpoint | URL | +| ------------------------ | ----------------------------------- | +| OpenID Connect Discovery | `/.well-known/openid-configuration` | +| Authorization Endpoint | `/login/oauth/authorize` | +| Access Token Endpoint | `/login/oauth/access_token` | +| OpenID Connect UserInfo | `/login/oauth/userinfo` | +| JSON Web Key Set | `/login/oauth/keys` | ## Supported OAuth2 Grants diff --git a/docs/content/doc/features/authentication.en-us.md b/docs/content/doc/features/authentication.en-us.md index 0c83fa4d2f8d..21181ac1b20e 100644 --- a/docs/content/doc/features/authentication.en-us.md +++ b/docs/content/doc/features/authentication.en-us.md @@ -88,8 +88,8 @@ Adds the following fields: - Bind Password (optional) - The password for the Bind DN specified above, if any. _Note: The password - is stored in plaintext at the server. As such, ensure that the Bind DN - has as few privileges as possible._ + is stored encrypted with the SECRET_KEY on the server. It is still recommended + to ensure that the Bind DN has as few privileges as possible._ - User Search Base **(required)** @@ -259,7 +259,7 @@ Before activating SSPI single sign-on authentication (SSO) you have to prepare y - Create a service principal name for the host where `gitea.exe` is running with class `HTTP`: - - Start `Command Prompt` or `PowerShell` as a priviledged domain user (eg. 
Domain Administrator) + - Start `Command Prompt` or `PowerShell` as a privileged domain user (eg. Domain Administrator) - Run the command below, replacing `host.domain.local` with the fully qualified domain name (FQDN) of the server where the web application will be running, and `domain\user` with the name of the account created in the previous step: ```sh @@ -283,7 +283,7 @@ Before activating SSPI single sign-on authentication (SSO) you have to prepare y - Click the `Sign In` button on the dashboard and choose SSPI to be automatically logged in with the same user that is currently logged on to the computer - If it does not work, make sure that: - - You are not running the web browser on the same server where gitea is running. You should be running the web browser on a domain joined computer (client) that is different from the server. If both the client and server are runnning on the same computer NTLM will be prefered over Kerberos. + - You are not running the web browser on the same server where gitea is running. You should be running the web browser on a domain joined computer (client) that is different from the server. If both the client and server are running on the same computer NTLM will be preferred over Kerberos. - There is only one `HTTP/...` SPN for the host - The SPN contains only the hostname, without the port - You have added the URL of the web app to the `Local intranet zone` diff --git a/docs/content/doc/help/faq.en-us.md b/docs/content/doc/help/faq.en-us.md index b3cf1aa0e81e..953aa9f0120d 100644 --- a/docs/content/doc/help/faq.en-us.md +++ b/docs/content/doc/help/faq.en-us.md @@ -85,6 +85,12 @@ If certain clone options aren't showing up (HTTP/S or SSH), the following option `DISABLE_SSH`: if set to true, there will be no SSH link `SSH_EXPOSE_ANONYMOUS`: if set to false, SSH links will be hidden for anonymous users +## File upload fails with: 413 Request Entity Too Large + +This error occurs when the reverse proxy limits the file upload size. + +See the [reverse proxy guide]({{< relref "doc/usage/reverse-proxies.en-us.md" >}}) for a solution with nginx. + ## Custom Templates not loading or working incorrectly Gitea's custom templates must be added to the correct location or Gitea will not find and use them. @@ -142,7 +148,7 @@ The current way to achieve this is to create/modify a user with a max repo creat Restricted users are limited to a subset of the content based on their organization/team memberships and collaborations, ignoring the public flag on organizations/repos etc.\_\_ -Example use case: A company runs a Gitea instance that requires login. Most repos are public (accessible/browseable by all co-workers). +Example use case: A company runs a Gitea instance that requires login. Most repos are public (accessible/browsable by all co-workers). At some point, a customer or third party needs access to a specific repo and only that repo. Making such a customer account restricted and granting any needed access using team membership(s) and/or collaboration(s) is a simple way to achieve that without the need to make everything private. @@ -324,7 +330,13 @@ is too small. Gitea requires that the `ROWFORMAT` for its tables is `DYNAMIC`. If you are receiving an error line containing `Error 1071: Specified key was too long; max key length is 1000 bytes...` then you are attempting to run Gitea on tables which use the ISAM engine. While this may have worked by chance in previous versions of Gitea, it has never been officially supported and -you must use InnoDB. 
You should run `ALTER TABLE table_name ENGINE=InnoDB;` for each table in the database. +you must use InnoDB. You should run `ALTER TABLE table_name ENGINE=InnoDB;` for each table in the database. +If you are using MySQL 5, another possible fix is +```mysql +SET GLOBAL innodb_file_format=Barracuda; +SET GLOBAL innodb_file_per_table=1; +SET GLOBAL innodb_large_prefix=1; +``` ## Why Are Emoji Broken On MySQL diff --git a/docs/content/doc/installation/from-binary.en-us.md b/docs/content/doc/installation/from-binary.en-us.md index 9d8864956b28..aa075bb239e1 100644 --- a/docs/content/doc/installation/from-binary.en-us.md +++ b/docs/content/doc/installation/from-binary.en-us.md @@ -32,13 +32,17 @@ chmod +x gitea ``` ## Verify GPG signature -Gitea signs all binaries with a [GPG key](https://keys.openpgp.org/search?q=teabot%40gitea.io) to prevent against unwanted modification of binaries. To validate the binary, download the signature file which ends in `.asc` for the binary you downloaded and use the gpg command line tool. +Gitea signs all binaries with a [GPG key](https://keys.openpgp.org/search?q=teabot%40gitea.io) to prevent against unwanted modification of binaries. +To validate the binary, download the signature file which ends in `.asc` for the binary you downloaded and use the gpg command line tool. ```sh gpg --keyserver keys.openpgp.org --recv 7C9E68152594688862D62AF62D9AE806EC1592E2 gpg --verify gitea-{{< version >}}-linux-amd64.asc gitea-{{< version >}}-linux-amd64 ``` +Look for the text `Good signature from "Teabot "` to assert a good binary, +despite warnings like `This key is not certified with a trusted signature!`. + ## Recommended server configuration **NOTE:** Many of the following directories can be configured using [Environment Variables]({{< relref "doc/advanced/environment-variables.en-us.md" >}}) as well! diff --git a/docs/content/doc/installation/from-package.en-us.md b/docs/content/doc/installation/from-package.en-us.md index cdb5833e33ca..bc349ba42ef9 100644 --- a/docs/content/doc/installation/from-package.en-us.md +++ b/docs/content/doc/installation/from-package.en-us.md @@ -2,7 +2,7 @@ date: "2016-12-01T16:00:00+02:00" title: "Installation from package" slug: "install-from-package" -weight: 10 +weight: 20 toc: false draft: false menu: @@ -92,18 +92,6 @@ is in `/usr/local/etc/rc.d/gitea`. To enable Gitea to run as a service, run `sysrc gitea_enable=YES` and start it with `service gitea start`. -## Cloudron - -Gitea is available as a 1-click install on [Cloudron](https://cloudron.io). -Cloudron makes it easy to run apps like Gitea on your server and keep them up-to-date and secure. - -[![Install](/cloudron.svg)](https://cloudron.io/button.html?app=io.gitea.cloudronapp) - -The Gitea package is maintained [here](https://git.cloudron.io/cloudron/gitea-app). - -There is a [demo instance](https://my.demo.cloudron.io) (username: cloudron password: cloudron) where -you can experiment with running Gitea. - ## Third-party Various other third-party packages of Gitea exist. diff --git a/docs/content/doc/installation/from-source.en-us.md b/docs/content/doc/installation/from-source.en-us.md index 5525faf3d574..496111e95634 100644 --- a/docs/content/doc/installation/from-source.en-us.md +++ b/docs/content/doc/installation/from-source.en-us.md @@ -54,8 +54,8 @@ git clone https://github.com/go-gitea/gitea no longer necessary.) Decide which version of Gitea to build and install. Currently, there are -multiple options to choose from. 
The `master` branch represents the current -development version. To build with master, skip to the [build section](#build). +multiple options to choose from. The `main` branch represents the current +development version. To build with main, skip to the [build section](#build). To work with tagged releases, the following commands can be used: @@ -89,7 +89,7 @@ To build from source, the following programs must be present on the system: - `node` {{< min-node-version >}} or higher with `npm`, see [here](https://nodejs.org/en/download/) - `make`, see here -Various [make tasks](https://github.com/go-gitea/gitea/blob/master/Makefile) +Various [make tasks](https://github.com/go-gitea/gitea/blob/main/Makefile) are provided to keep the build process as simple as possible. Depending on requirements, the following build tags can be included. diff --git a/docs/content/doc/installation/on-cloud-provider.md b/docs/content/doc/installation/on-cloud-provider.md new file mode 100644 index 000000000000..c61c042af242 --- /dev/null +++ b/docs/content/doc/installation/on-cloud-provider.md @@ -0,0 +1,44 @@ +--- +date: "2016-12-01T16:00:00+02:00" +title: "Install on Cloud Provider" +slug: "install-on-cloud-provider" +weight: 20 +toc: false +draft: false +menu: + sidebar: + parent: "installation" + name: "On cloud provider" + weight: 20 + identifier: "install-on-cloud-provider" +--- + +# Installation on Cloud Provider + +**Table of Contents** + +{{< toc >}} + +## Cloudron + +Gitea is available as a 1-click install on [Cloudron](https://cloudron.io). +Cloudron makes it easy to run apps like Gitea on your server and keep them up-to-date and secure. + +[![Install](/cloudron.svg)](https://cloudron.io/button.html?app=io.gitea.cloudronapp) + +The Gitea package is maintained [here](https://git.cloudron.io/cloudron/gitea-app). + +There is a [demo instance](https://my.demo.cloudron.io) (username: cloudron password: cloudron) where +you can experiment with running Gitea. + +## Vultr + +Gitea can be found in [Vultr](https://www.vultr.com)'s marketplace. + +To deploy it, have a look at https://www.vultr.com/marketplace/apps/gitea. + +## DigitalOcean + +[DigitalOcean](https://www.digitalocean.com) has Gitea as a droplet in its marketplace. + +Just create a new [Gitea Droplet](https://marketplace.digitalocean.com/apps/gitea). diff --git a/docs/content/doc/installation/with-docker-rootless.en-us.md b/docs/content/doc/installation/with-docker-rootless.en-us.md index 672c842024d2..62902a8866b9 100644 --- a/docs/content/doc/installation/with-docker-rootless.en-us.md +++ b/docs/content/doc/installation/with-docker-rootless.en-us.md @@ -32,15 +32,14 @@ image as a service. Since there is no database available, one can be initialized Create a directory for `data` and `config` then paste the following content into a file named `docker-compose.yml`. Note that the volume should be owned by the user/group with the UID/GID specified in the config file. By default Gitea in docker will use uid:1000 gid:1000. If needed you can set ownership on those folders with the command: `sudo chown 1000:1000 config/ data/` If you don't give the volume correct permissions, the container may not start. -Also be aware that the tag `:latest-rootless` will install the current development version. -For a stable release you can use `:1-rootless` or specify a certain release like `:{{< version >}}-rootless`.
+For a stable release you could use `:latest-rootless`, `:1-rootless` or specify a certain release like `:{{< version >}}-rootless`, but if you'd like to use the latest development version then `:dev-rootless` would be an appropriate tag. ```yaml version: "2" services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless restart: always volumes: - ./data:/var/lib/gitea @@ -63,7 +62,7 @@ version: "2" services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless restart: always volumes: - ./data:/var/lib/gitea @@ -87,7 +86,7 @@ version: "2" services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless + environment: + - GITEA__database__DB_TYPE=mysql + - GITEA__database__HOST=db:3306 @@ -107,7 +106,7 @@ services: + - db + + db: -+ image: mysql:5.7 ++ image: mysql:8 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea @@ -128,7 +127,7 @@ version: "2" services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless environment: + - GITEA__database__DB_TYPE=postgres + - GITEA__database__HOST=db:5432 @@ -148,7 +147,7 @@ services: + - db + + db: -+ image: postgres:9.6 ++ image: postgres:13 + restart: always + environment: + - POSTGRES_USER=gitea @@ -174,7 +173,7 @@ version: "2" + services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless restart: always volumes: - - ./data:/var/lib/gitea @@ -201,7 +200,7 @@ version: "2" services: server: - image: gitea/gitea:latest-rootless + image: gitea/gitea:{{< version >}}-rootless restart: always + user: 1001 volumes: @@ -262,11 +261,11 @@ docker-compose up -d - Rename folder (inside volume) gitea to custom - Edit app.ini if needed - Set START_SSH_SERVER = true -- Use image gitea/gitea:latest-rootless +- Use image gitea/gitea:{{< version >}}-rootless ## Managing Deployments With Environment Variables -In addition to the environment variables above, any settings in `app.ini` can be set or overridden with an environment variable of the form: `GITEA__SECTION_NAME__KEY_NAME`. These settings are applied each time the docker container starts. Full information [here](https://github.com/go-gitea/gitea/tree/master/contrib/environment-to-ini). +In addition to the environment variables above, any settings in `app.ini` can be set or overridden with an environment variable of the form: `GITEA__SECTION_NAME__KEY_NAME`. These settings are applied each time the docker container starts. Full information [here](https://github.com/go-gitea/gitea/tree/main/contrib/environment-to-ini). These environment variables can be passed to the docker container in `docker-compose.yml`. The following example will enable an smtp mail server if the required env variables `GITEA__mailer__FROM`, `GITEA__mailer__HOST`, `GITEA__mailer__PASSWD` are set on the host or in a `.env` file in the same directory as `docker-compose.yml`: diff --git a/docs/content/doc/installation/with-docker.en-us.md b/docs/content/doc/installation/with-docker.en-us.md index 83f82d85d635..3c51852fba98 100644 --- a/docs/content/doc/installation/with-docker.en-us.md +++ b/docs/content/doc/installation/with-docker.en-us.md @@ -34,8 +34,7 @@ image as a service. Since there is no database available, one can be initialized Create a directory like `gitea` and paste the following content into a file named `docker-compose.yml`. Note that the volume should be owned by the user/group with the UID/GID specified in the config file. 
If you don't give the volume correct permissions, the container may not start. -Also be aware that the tag `:latest` will install the current development version. -For a stable release you can use `:1` or specify a certain release like `:{{< version >}}`. +For a stable release you can use `:latest`, `:1` or specify a certain release like `:{{< version >}}`, but if you'd like to use the latest development version of Gitea then you could use the `:dev` tag. ```yaml version: "3" @@ -137,7 +136,7 @@ services: + - db + + db: -+ image: mysql:5.7 ++ image: mysql:8 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea @@ -188,7 +187,7 @@ services: + - db + + db: -+ image: postgres:9.6 ++ image: postgres:13 + restart: always + environment: + - POSTGRES_USER=gitea diff --git a/docs/content/doc/installation/with-docker.zh-cn.md b/docs/content/doc/installation/with-docker.zh-cn.md index d32b774c201a..f823e0d74959 100644 --- a/docs/content/doc/installation/with-docker.zh-cn.md +++ b/docs/content/doc/installation/with-docker.zh-cn.md @@ -122,7 +122,7 @@ services: + - db + + db: -+ image: mysql:5.7 ++ image: mysql:8 + restart: always + environment: + - MYSQL_ROOT_PASSWORD=gitea @@ -172,7 +172,7 @@ services: + - db + + db: -+ image: postgres:9.6 ++ image: postgres:13 + restart: always + environment: + - POSTGRES_USER=gitea diff --git a/docs/content/doc/upgrade/from-gogs.en-us.md b/docs/content/doc/upgrade/from-gogs.en-us.md index 794ab3e87253..778c1a173095 100644 --- a/docs/content/doc/upgrade/from-gogs.en-us.md +++ b/docs/content/doc/upgrade/from-gogs.en-us.md @@ -96,7 +96,7 @@ See [#4286](https://github.com/go-gitea/gitea/issues/4286). ## Add Gitea to startup on Unix -Update the appropriate file from [gitea/contrib](https://github.com/go-gitea/gitea/tree/master/contrib) +Update the appropriate file from [gitea/contrib](https://github.com/go-gitea/gitea/tree/main/contrib) with the right environment variables. For distros with systemd: diff --git a/docs/content/doc/usage/command-line.en-us.md b/docs/content/doc/usage/command-line.en-us.md index 40933a7b333c..0bc8d70fdb53 100644 --- a/docs/content/doc/usage/command-line.en-us.md +++ b/docs/content/doc/usage/command-line.en-us.md @@ -46,6 +46,8 @@ Starts the server: - `--port number`, `-p number`: Port number. Optional. (default: 3000). Overrides configuration file. - `--install-port number`: Port number to run the install page on. Optional. (default: 3000). Overrides configuration file. - `--pid path`, `-P path`: Pidfile path. Optional. + - `--quiet`, `-q`: Only emit Fatal logs on the console for logs emitted before logging is set up. + - `--verbose`: Emit tracing logs on the console for logs emitted before logging is set up. - Examples: - `gitea web` - `gitea web --port 80` diff --git a/docs/content/doc/usage/email-setup.en-us.md b/docs/content/doc/usage/email-setup.en-us.md index 54ccfbf82ac4..390dc78e5069 100644 --- a/docs/content/doc/usage/email-setup.en-us.md +++ b/docs/content/doc/usage/email-setup.en-us.md @@ -19,12 +19,15 @@ menu: {{< toc >}} -To use Gitea's built-in Email support, update the `app.ini` config file [mailer] section: +Gitea has mailer functionality for sending transactional emails (such as registration confirmation). It can be configured to either use Sendmail (or compatible MTAs like Postfix and msmtp) or to use an SMTP server directly. -## Sendmail version +## Using Sendmail -Use the operating system’s sendmail command instead of SMTP. This is common on Linux servers.
-Note: For use in the official Gitea Docker image, please configure with the SMTP version. + +Use the `sendmail` command as the mailer. + +Note: For use in the official Gitea Docker image, please configure with the SMTP version (see the following section). + +Note: For Internet-facing sites, consult the documentation of your MTA for instructions on sending emails over TLS. Also set up SPF, DMARC, and DKIM DNS records so that the emails you send are accepted as legitimate by various email providers. ```ini [mailer] @@ -34,7 +37,9 @@ MAILER_TYPE = sendmail SENDMAIL_PATH = /usr/sbin/sendmail ``` -## SMTP version +## Using SMTP + +Use an SMTP server directly as a relay. This option is useful if you don't want to set up an MTA on your instance but you have an account at an email provider. ```ini [mailer] @@ -47,17 +52,19 @@ USER = gitea@mydomain.com PASSWD = `password` ``` -- Restart Gitea for the configuration changes to take effect. +Restart Gitea for the configuration changes to take effect. -- To send a test email to validate the settings, go to Gitea > Site Administration > Configuration > SMTP Mailer Configuration. +To send a test email to validate the settings, go to Gitea > Site Administration > Configuration > SMTP Mailer Configuration. For the full list of options check the [Config Cheat Sheet]({{< relref "doc/advanced/config-cheat-sheet.en-us.md" >}}) -- Please note: authentication is only supported when the SMTP server communication is encrypted with TLS or `HOST=localhost`. TLS encryption can be through: - - Via the server supporting TLS through STARTTLS - usually provided on port 587. (Also known as Opportunistic TLS.) - - SMTPS connection (SMTP over transport layer security) via the default port 465. +Please note: authentication is only supported when the SMTP server communication is encrypted with TLS or `HOST=localhost`. TLS encryption can be through: + - STARTTLS (also known as Opportunistic TLS) via port 587. The initial connection is made in cleartext but may then be upgraded to TLS if the server supports it. + - SMTPS connection (SMTP over TLS) via the default port 465. The connection to the server uses TLS from the beginning. - Forced SMTPS connection with `IS_TLS_ENABLED=true`. (These are both known as Implicit TLS.) -- This is due to protections imposed by the Go internal libraries against STRIPTLS attacks. +This is due to protections imposed by the Go internal libraries against STRIPTLS attacks. + +Note that Implicit TLS is recommended by [RFC8314](https://tools.ietf.org/html/rfc8314#section-3) since 2018. ### Gmail @@ -74,3 +81,4 @@ MAILER_TYPE = smtp IS_TLS_ENABLED = true HELO_HOSTNAME = example.com ``` + diff --git a/docs/content/doc/usage/fail2ban-setup.en-us.md b/docs/content/doc/usage/fail2ban-setup.en-us.md index 790d4c020b65..f96cf889a398 100644 --- a/docs/content/doc/usage/fail2ban-setup.en-us.md +++ b/docs/content/doc/usage/fail2ban-setup.en-us.md @@ -29,22 +29,32 @@ on a bad authentication from the web or CLI using SSH or HTTP respectively: ```log 2020/10/15 16:05:09 modules/ssh/ssh.go:143:publicKeyHandler() [W] Failed authentication attempt from xxx.xxx.xxx.xxx ``` +(DEPRECATED: This may be a false positive as the user may still go on to correctly authenticate.) ```log 2020/10/15 16:05:09 modules/ssh/ssh.go:155:publicKeyHandler() [W] Failed authentication attempt from xxx.xxx.xxx.xxx ``` +(DEPRECATED: This may be a false positive as the user may still go on to correctly authenticate.)
```log 2020/10/15 16:05:09 modules/ssh/ssh.go:198:publicKeyHandler() [W] Failed authentication attempt from xxx.xxx.xxx.xxx ``` +(DEPRECATED: This may be a false positive as the user may still go on to correctly authenticate.) ```log 2020/10/15 16:05:09 modules/ssh/ssh.go:213:publicKeyHandler() [W] Failed authentication attempt from xxx.xxx.xxx.xxx ``` +(DEPRECATED: This may be a false positive as the user may still go on to correctly authenticate.) ```log 2020/10/15 16:05:09 modules/ssh/ssh.go:227:publicKeyHandler() [W] Failed authentication attempt from xxx.xxx.xxx.xxx ``` +(DEPRECATED: This may be a false positive as the user may still go on to correctly authenticate.) + +```log +2020/10/15 16:05:09 modules/ssh/ssh.go:249:sshConnectionFailed() [W] Failed authentication attempt from xxx.xxx.xxx.xxx +``` +(From 1.15 this new message will be available and doesn't have any of the false positive results that the above messages from publicKeyHandler do. This will only be logged if the user has completely failed authentication.) ```log 2020/10/15 16:08:44 ...s/context/context.go:204:HandleText() [E] invalid credentials from xxx.xxx.xxx.xxx diff --git a/docs/content/doc/usage/git-lfs-support.md b/docs/content/doc/usage/git-lfs-support.md index 66e521761212..a8f935de2bf0 100644 --- a/docs/content/doc/usage/git-lfs-support.md +++ b/docs/content/doc/usage/git-lfs-support.md @@ -24,3 +24,5 @@ LFS_START_SERVER = true ; Where your lfs files reside, default is data/lfs. LFS_CONTENT_PATH = /home/gitea/data/lfs ``` + +**Note**: LFS server support needs at least Git v2.1.2 installed on the server. diff --git a/docs/content/doc/usage/reverse-proxies.en-us.md b/docs/content/doc/usage/reverse-proxies.en-us.md index 74e39039d79f..5f1e6685ecfb 100644 --- a/docs/content/doc/usage/reverse-proxies.en-us.md +++ b/docs/content/doc/usage/reverse-proxies.en-us.md @@ -120,6 +120,14 @@ server { } ``` +## Resolving Error: 413 Request Entity Too Large + +This error indicates nginx is configured to restrict the file upload size. + +In your nginx config file containing your Gitea proxy directive, find the `location { ... }` block for Gitea and add the line +`client_max_body_size 16M;` to set this limit to 16 megabytes or any other limit of your choice. + + ## Apache HTTPD If you want Apache HTTPD to serve your Gitea instance, you can add the following to your Apache HTTPD configuration (usually located at `/etc/apache2/httpd.conf` in Ubuntu): @@ -221,12 +229,28 @@ If you wish to run Gitea with IIS. You will need to setup IIS with URL Rewrite a ```xml + + + + + + + + + + + + + + + + - + - + @@ -255,6 +279,16 @@ If you wish to run Gitea with IIS. You will need to setup IIS with URL Rewrite a + + + + + + + + + ``` diff --git a/docs/content/page/index.en-us.md b/docs/content/page/index.en-us.md index 98d61576f3d1..a5204e17dff7 100644 --- a/docs/content/page/index.en-us.md +++ b/docs/content/page/index.en-us.md @@ -117,7 +117,7 @@ Windows, on architectures like amd64, i386, ARM, PowerPC, and others. - Configuration viewer - Everything in config file - System notices - - When somthing unexpected happens + - When something unexpected happens - Monitoring - Current processes - Cron jobs @@ -155,7 +155,7 @@ Windows, on architectures like amd64, i386, ARM, PowerPC, and others. - Libravatar - Custom - Password
- Libravatar - Custom - Password - - Mutiple email addresses + - Multiple email addresses - SSH Keys - Connected applications - Two factor authentication diff --git a/go.mod b/go.mod index 9a3ef0652221..5032acce990f 100644 --- a/go.mod +++ b/go.mod @@ -11,45 +11,47 @@ require ( gitea.com/go-chi/captcha v0.0.0-20210110083842-e7696c336a1e gitea.com/go-chi/session v0.0.0-20210108030337-0cb48c5ba8ee gitea.com/lunny/levelqueue v0.3.0 - github.com/Microsoft/go-winio v0.4.18 // indirect + github.com/Microsoft/go-winio v0.5.0 // indirect github.com/NYTimes/gziphandler v1.1.1 - github.com/PuerkitoBio/goquery v1.5.1 - github.com/RoaringBitmap/roaring v0.6.0 // indirect - github.com/alecthomas/chroma v0.8.2 - github.com/andybalholm/brotli v1.0.1 // indirect - github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect - github.com/blevesearch/bleve/v2 v2.0.3 + github.com/ProtonMail/go-crypto v0.0.0-20210705153151-cc34b1f6908b // indirect + github.com/PuerkitoBio/goquery v1.7.0 + github.com/RoaringBitmap/roaring v0.9.1 // indirect + github.com/alecthomas/chroma v0.9.2 + github.com/andybalholm/brotli v1.0.3 // indirect + github.com/andybalholm/cascadia v1.2.0 // indirect + github.com/blevesearch/bleve/v2 v2.0.6 github.com/boombuler/barcode v1.0.1 // indirect github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b // indirect - github.com/caddyserver/certmagic v0.13.0 - github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af // indirect + github.com/caddyserver/certmagic v0.14.0 github.com/chi-middleware/proxy v1.1.1 github.com/couchbase/go-couchbase v0.0.0-20210224140812-5740cd35f448 // indirect github.com/couchbase/gomemcached v0.1.2 // indirect github.com/couchbase/goutils v0.0.0-20210118111533-e33d3ffb5401 // indirect github.com/denisenkom/go-mssqldb v0.10.0 github.com/dgrijalva/jwt-go v3.2.0+incompatible - github.com/dlclark/regexp2 v1.4.0 // indirect + github.com/djherbis/buffer v1.2.0 + github.com/djherbis/nio/v3 v3.0.1 github.com/dustin/go-humanize v1.0.0 github.com/editorconfig/editorconfig-core-go/v2 v2.4.2 github.com/emirpasic/gods v1.12.0 github.com/ethantkoenig/rupture v1.0.0 - github.com/gliderlabs/ssh v0.3.2 + github.com/gliderlabs/ssh v0.3.3 github.com/go-asn1-ber/asn1-ber v1.5.3 // indirect github.com/go-chi/chi v1.5.4 github.com/go-chi/cors v1.2.0 - github.com/go-enry/go-enry/v2 v2.6.1 - github.com/go-git/go-billy/v5 v5.1.0 - github.com/go-git/go-git/v5 v5.3.0 + github.com/go-enry/go-enry/v2 v2.7.1 + github.com/go-git/go-billy/v5 v5.3.1 + github.com/go-git/go-git/v5 v5.4.3-0.20210630082519-b4368b2a2ca4 github.com/go-ldap/ldap/v3 v3.3.0 - github.com/go-redis/redis/v8 v8.8.2 + github.com/go-redis/redis/v8 v8.11.0 github.com/go-sql-driver/mysql v1.6.0 github.com/go-swagger/go-swagger v0.27.0 - github.com/go-testfixtures/testfixtures/v3 v3.6.0 + github.com/go-testfixtures/testfixtures/v3 v3.6.1 github.com/gobwas/glob v0.2.3 github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 + github.com/golang/snappy v0.0.4 // indirect github.com/google/go-github/v32 v32.1.0 github.com/google/go-querystring v1.1.0 // indirect github.com/google/uuid v1.2.0 @@ -57,45 +59,46 @@ require ( github.com/gorilla/mux v1.8.0 // indirect github.com/gorilla/sessions v1.2.1 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-retryablehttp v0.7.0 // indirect github.com/hashicorp/go-version v1.3.1 + 
github.com/hashicorp/golang-lru v0.5.4 github.com/huandu/xstrings v1.3.2 github.com/issue9/identicon v1.2.0 github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7 - github.com/json-iterator/go v1.1.10 + github.com/json-iterator/go v1.1.11 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 github.com/kevinburke/ssh_config v1.1.0 // indirect github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 - github.com/klauspost/compress v1.12.1 + github.com/klauspost/compress v1.13.1 + github.com/klauspost/cpuid/v2 v2.0.8 // indirect github.com/klauspost/pgzip v1.2.5 // indirect github.com/lafriks/xormstore v1.4.0 - github.com/lib/pq v1.10.1 + github.com/lib/pq v1.10.2 github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96 github.com/markbates/goth v1.67.1 - github.com/mattn/go-isatty v0.0.12 - github.com/mattn/go-runewidth v0.0.12 // indirect + github.com/mattn/go-isatty v0.0.13 + github.com/mattn/go-runewidth v0.0.13 // indirect github.com/mattn/go-sqlite3 v1.14.7 - github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81 - github.com/mgechev/revive v1.0.6 github.com/mholt/archiver/v3 v3.5.0 - github.com/microcosm-cc/bluemonday v1.0.8 - github.com/miekg/dns v1.1.40 // indirect + github.com/microcosm-cc/bluemonday v1.0.15 + github.com/miekg/dns v1.1.43 // indirect github.com/minio/md5-simd v1.1.2 // indirect - github.com/minio/minio-go/v7 v7.0.10 + github.com/minio/minio-go/v7 v7.0.12 github.com/minio/sha256-simd v1.0.0 // indirect - github.com/mitchellh/go-homedir v1.1.0 github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect github.com/msteinert/pam v0.0.0-20201130170657-e61372126161 github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 github.com/niklasfasching/go-org v1.5.0 + github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/oliamb/cutter v0.2.2 - github.com/olivere/elastic/v7 v7.0.24 - github.com/pelletier/go-toml v1.9.0 - github.com/pierrec/lz4/v4 v4.1.3 // indirect + github.com/olivere/elastic/v7 v7.0.25 + github.com/pelletier/go-toml v1.9.0 // indirect + github.com/pierrec/lz4/v4 v4.1.8 // indirect github.com/pkg/errors v0.9.1 github.com/pquerna/otp v1.3.0 - github.com/prometheus/client_golang v1.10.0 + github.com/prometheus/client_golang v1.11.0 github.com/quasoft/websspi v1.0.0 - github.com/rivo/uniseg v0.2.0 // indirect + github.com/rs/xid v1.3.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.2.0 github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect @@ -108,25 +111,27 @@ require ( github.com/unknwon/com v1.0.1 github.com/unknwon/i18n v0.0.0-20210321134014-0ebbf2df1c44 github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae - github.com/unrolled/render v1.1.0 + github.com/unrolled/render v1.4.0 github.com/urfave/cli v1.22.5 - github.com/willf/bitset v1.1.11 // indirect - github.com/xanzy/go-gitlab v0.48.0 + github.com/xanzy/go-gitlab v0.50.1 github.com/yohcop/openid-go v1.0.0 - github.com/yuin/goldmark v1.3.5 - github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691 + github.com/yuin/goldmark v1.4.0 + github.com/yuin/goldmark-highlighting v0.0.0-20210516132338-9216f9c5aa01 github.com/yuin/goldmark-meta v1.0.0 + go.etcd.io/bbolt v1.3.6 // indirect go.jolheiser.com/hcaptcha v0.0.4 go.jolheiser.com/pwn v0.0.3 - go.uber.org/multierr v1.6.0 // indirect - go.uber.org/zap v1.16.0 // indirect - golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b - golang.org/x/net v0.0.0-20210421230115-4e50805a0758 - 
golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78 - golang.org/x/sys v0.0.0-20210421221651-33663a62ff08 + go.uber.org/atomic v1.8.0 // indirect + go.uber.org/multierr v1.7.0 // indirect + go.uber.org/zap v1.18.1 // indirect + golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e + golang.org/x/net v0.0.0-20210614182718-04defd469f4e + golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 + golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c golang.org/x/text v0.3.6 - golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect + golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6 // indirect golang.org/x/tools v0.1.0 + google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df gopkg.in/ini.v1 v1.62.0 @@ -134,7 +139,7 @@ require ( mvdan.cc/xurls/v2 v2.2.0 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 xorm.io/builder v0.3.9 - xorm.io/xorm v1.0.7 + xorm.io/xorm v1.1.2 ) replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1 diff --git a/go.sum b/go.sum index 70867f495fe2..fc6e2422c849 100644 --- a/go.sum +++ b/go.sum @@ -57,48 +57,46 @@ github.com/6543/go-version v1.3.1 h1:HvOp+Telns7HWJ2Xo/05YXQSB2bE0WmVgbHqwMPZT4U github.com/6543/go-version v1.3.1/go.mod h1:oqFAHCwtLVUTLdhQmVZWYvaHXTdsbB4SY85at64SQEo= github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c h1:/IBSNwUN8+eKzUzbJPqhK839ygXJ82sde8x3ogr6R28= github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0= -github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0= -github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Julusian/godocdown v0.0.0-20170816220326-6d19f8ff2df8/go.mod h1:INZr5t32rG59/5xeltqoCJoNY7e5x/3xoY9WSWVWg74= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= -github.com/Microsoft/go-winio v0.4.18 h1:yjwCO1nhWEShaA5qsmPOBzAOjRCa2PRLsDNZ5yBWXpg= -github.com/Microsoft/go-winio v0.4.18/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.0 h1:Elr9Wn+sGKPlkaBvwu4mTrxtmOp3F3yV9qhaHbXGjwU= +github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/PuerkitoBio/goquery v1.5.1 h1:PSPBGne8NIUWw+/7vFBV+kG2J/5MOjbzc7154OaKCSE= +github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo= +github.com/ProtonMail/go-crypto v0.0.0-20210705153151-cc34b1f6908b h1:BF5p87XWvmgdrTPPzcRMwC0TMQbviwQ+uBKfNfWJy50= +github.com/ProtonMail/go-crypto 
v0.0.0-20210705153151-cc34b1f6908b/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo= github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc= +github.com/PuerkitoBio/goquery v1.7.0 h1:O5SP3b9JWqMSVMG69zMfj577zwkSNpxrFf7ybS74eiw= +github.com/PuerkitoBio/goquery v1.7.0/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/RoaringBitmap/gocroaring v0.4.0/go.mod h1:NieMwz7ZqwU2DD73/vvYwv7r4eWBKuPVSXZIpsaMwCI= +github.com/RoaringBitmap/real-roaring-datasets v0.0.0-20190726190000-eb7c87156f76/go.mod h1:oM0MHmQ3nDsq609SS36p+oYbRi16+oVvU2Bw4Ipv0SE= github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo= -github.com/RoaringBitmap/roaring v0.6.0 h1:tZcn2nJpUrZf+xQY8x+9QY7BxSETMjkdNG4Ts5zahyU= -github.com/RoaringBitmap/roaring v0.6.0/go.mod h1:WZ83fjBF/7uBHi6QoFyfGL4+xuV4Qn+xFkm4+vSzrhE= -github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= -github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= -github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= -github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/RoaringBitmap/roaring v0.7.3/go.mod h1:jdT9ykXwHFNdJbEtxePexlFYH9LXucApeS0/+/g+p1I= +github.com/RoaringBitmap/roaring v0.9.1 h1:5PRizBmoN/PfV17nPNQou4dHQ7NcJi8FO/bihdYyCEM= +github.com/RoaringBitmap/roaring v0.9.1/go.mod h1:h1B7iIUOmnAeb5ytYMvnHJwxMc6LUrwBnzXWRuqTQUc= +github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk= +github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c= -github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= -github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/chroma v0.7.2-0.20200305040604-4f3623dce67a/go.mod h1:fv5SzZPFJbwp2NXJWpFIX7DZS4HgV1K4ew4Pc2OZD9s= -github.com/alecthomas/chroma v0.8.2 h1:x3zkuE2lUk/RIekyAJ3XRqSCP4zwWDfcw/YJCuCAACg= github.com/alecthomas/chroma v0.8.2/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM= +github.com/alecthomas/chroma v0.9.2 h1:yU1sE2+TZbLIQPMk30SolL2Hn53SR/Pv750f7qZ/XMs= +github.com/alecthomas/chroma v0.9.2/go.mod h1:eMuEnpA18XbG/WhOWtCzJHS7WqEtDAI+HxdwoW0nVSk= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod 
h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= -github.com/alecthomas/kong v0.1.17-0.20190424132513-439c674f7ae0/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI= github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI= github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE= -github.com/alecthomas/kong-hcl v0.1.8-0.20190615233001-b21fea9723c8/go.mod h1:MRgZdU3vrFd05IQ89AxUZ0aYdF39BYoNFa324SodPCA= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -108,22 +106,20 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.0.1 h1:KqhlKozYbRtJvsPrrEeXcO+N2l6NYT5A2QAFmSULpEc= -github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo= +github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM= +github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y= +github.com/andybalholm/cascadia v1.2.0 h1:vuRCkM5Ozh/BfmsaTm26kbjm0mIOM3yS5Ek/F5h18aE= +github.com/andybalholm/cascadia v1.2.0/go.mod h1:YCyR8vOZT9aZ1CHEd8ap0gMVm2aFgxBp0T0eFw1RUQY= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod 
h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= @@ -131,23 +127,24 @@ github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:o github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= -github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= -github.com/aws/aws-sdk-go v1.38.3/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= -github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/aws/aws-sdk-go v1.38.17/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA= +github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= github.com/blevesearch/bleve/v2 v2.0.1/go.mod h1:OBP2Pktqik8vEiUlGhuWjYx7KiO4zD542+DHqICwM5w= -github.com/blevesearch/bleve/v2 v2.0.3 h1:mDrwrsRIA4PDYkfUNjoh5zGECvquuJIA3MJU5ivaO8E= -github.com/blevesearch/bleve/v2 v2.0.3/go.mod h1:ip+4iafiEq2gCY5rJXe87bT6LkF/OJMCjQEYIfTBfW8= +github.com/blevesearch/bleve/v2 v2.0.6 h1:2dV2S4pyUqQHftUFzM0htUCWC8MeRg2qsmgIvjnKlgU= +github.com/blevesearch/bleve/v2 v2.0.6/go.mod h1:UhqLjgDhN4mji6F1dL3fPghcqaBV6r6bXwKCdaBa3Is= github.com/blevesearch/bleve_index_api v1.0.0 h1:Ds3XeuTxjXCkG6pgIwWDRyooJKNIuOKemnN0N0IkhTU= github.com/blevesearch/bleve_index_api v1.0.0/go.mod h1:fiwKS0xLEm+gBRgv5mumf0dhgFr2mDgZah1pqv1c1M4= github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo= @@ -164,55 +161,47 @@ github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs= github.com/blevesearch/upsidedown_store_api v1.0.1 h1:1SYRwyoFLwG3sj0ed89RLtM15amfX2pXlYbFOnF8zNU= github.com/blevesearch/upsidedown_store_api v1.0.1/go.mod h1:MQDVGpHZrpe3Uy26zJBf/a8h0FZY6xJbthIMm8myH2Q= 
-github.com/blevesearch/vellum v1.0.3 h1:U86G41A7CtXNzzpIJHM8lSTUqz1Mp8U870TkcdCzZc8= -github.com/blevesearch/vellum v1.0.3/go.mod h1:2u5ax02KeDuNWu4/C+hVQMD6uLN4txH1JbtpaDNLJRo= +github.com/blevesearch/vellum v1.0.5 h1:L5dJ7hKauRVbuH7I8uqLeSK92CPPY6FfrbAmLhAug8A= +github.com/blevesearch/vellum v1.0.5/go.mod h1:atE0EH3fvk43zzS7t1YNdNC7DbmcC3uz+eMD5xZ2OyQ= github.com/blevesearch/zapx/v11 v11.1.10/go.mod h1:DTjbcBqrr/Uo82UBilDC8lEew42gN/OcIyiTNFtSijc= -github.com/blevesearch/zapx/v11 v11.2.0 h1:GBkCJYsyj3eIU4+aiLPxoMz1PYvDbQZl/oXHIBZIP60= -github.com/blevesearch/zapx/v11 v11.2.0/go.mod h1:gN/a0alGw1FZt/YGTo1G6Z6XpDkeOfujX5exY9sCQQM= +github.com/blevesearch/zapx/v11 v11.2.1 h1:udluDHdr99gGSeL3vZLtJbML0OJ98mK1Peivtm5OYho= +github.com/blevesearch/zapx/v11 v11.2.1/go.mod h1:TBkJF5Qq0EwZbbBQmkW6/AQVSYwXXpp0xwtQ5wXHVMI= github.com/blevesearch/zapx/v12 v12.1.10/go.mod h1:14NmKnPrnKAIyiEJM566k/Jk+FQpuiflT5d3uaaK3MI= -github.com/blevesearch/zapx/v12 v12.2.0 h1:dyRcSoZVO1jktL4UpGkCEF1AYa3xhKPirh4/N+Va+Ww= -github.com/blevesearch/zapx/v12 v12.2.0/go.mod h1:fdjwvCwWWwJW/EYTYGtAp3gBA0geCYGLcVTtJEZnY6A= +github.com/blevesearch/zapx/v12 v12.2.1 h1:nbeecR8M3dEcIIYfKDaSRpJ9E205E7BvjhVwf/l5ajI= +github.com/blevesearch/zapx/v12 v12.2.1/go.mod h1:sSXvgEs7MKqqDIRSpyFd6ZJUEVlhxuDB0d8/WT2WlgA= github.com/blevesearch/zapx/v13 v13.1.10/go.mod h1:YsVY6YGpTEAlJOMjdL7EsdBLvjWd8kPa2gwJDNpqLJo= -github.com/blevesearch/zapx/v13 v13.2.0 h1:mUqbaqQABp8nBE4t4q2qMyHCCq4sykoV8r7aJk4ih3s= -github.com/blevesearch/zapx/v13 v13.2.0/go.mod h1:o5rAy/lRS5JpAbITdrOHBS/TugWYbkcYZTz6VfEinAQ= +github.com/blevesearch/zapx/v13 v13.2.1 h1:6K797fvkurY6heEMPhyUlq3VULIpkD1sbBqqQUMFf4g= +github.com/blevesearch/zapx/v13 v13.2.1/go.mod h1:Fblcy4ykPy7XiaZ2svvpQaYgEqI+8vkdvMVx5zcawF4= github.com/blevesearch/zapx/v14 v14.1.10/go.mod h1:hsULl5eJSxs5NEfBsmeT9qrqdCP+/ecpVZKt60M4V64= -github.com/blevesearch/zapx/v14 v14.2.0 h1:UsfRqvM9RJxKNKrkR1U7aYc1cv9MWx719fsAjbF6joI= -github.com/blevesearch/zapx/v14 v14.2.0/go.mod h1:GNgZusc1p4ot040cBQMRGEZobvwjCquiEKYh1xLFK9g= +github.com/blevesearch/zapx/v14 v14.2.1 h1:V3RzDc7XZ51Kv9ZhhzMlHCSoY4+jxqy9VBqHxTqW4pg= +github.com/blevesearch/zapx/v14 v14.2.1/go.mod h1:veKtVCDzl4vvYeT5zULXEXqPR948uilzixzmmdtpCkU= github.com/blevesearch/zapx/v15 v15.1.10/go.mod h1:4ypq25bwtSQKzwEF1UERyIhmGTbMT3brY/n4NC5gRnM= -github.com/blevesearch/zapx/v15 v15.2.0 h1:ZpibwcrrOaeslkOw3sJ7npP7KDgRHI/DkACjKTqFwyM= -github.com/blevesearch/zapx/v15 v15.2.0/go.mod h1:MmQceLpWfME4n1WrBFIwplhWmaQbQqLQARpaKUEOs/A= +github.com/blevesearch/zapx/v15 v15.2.1 h1:ZaqQiWLo0srtPvy3ozgpR9+Oabs3HQrF4uJM0HiKVBY= +github.com/blevesearch/zapx/v15 v15.2.1/go.mod h1:pUCN72ZJkVd7dU9lA4Fd8E3+fl5wv3JPpThk4FQ5bpA= github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1 h1:NDBbPmhS+EqABEs5Kg3n/5ZNjy73Pz7SIV+KCeqyXcs= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0= github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= -github.com/caddyserver/certmagic v0.13.0 h1:ky0rntZvIFiUKFdIikYxj31WN+Ts0Od6Wjz83iTzxfc= -github.com/caddyserver/certmagic v0.13.0/go.mod h1:dNOzF4iOB7H9E51xTooMB90vs+2XNVtpnx0liQNsQY4= 
-github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= -github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/caddyserver/certmagic v0.14.0 h1:XW1o32s7smIYEJSc6g+N8YXljpjRo5ZE2zi3CIYTs74= +github.com/caddyserver/certmagic v0.14.0/go.mod h1:oRQOZmUVKwlpgNidslysHt05osM9uMrJ4YMk+Ot4P4Q= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chavacava/garif v0.0.0-20210405163807-87a70f3d418b/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af h1:spmv8nSH9h5oCQf40jt/ufBCt9j0/58u4G+rkeMqXGI= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= github.com/chi-middleware/proxy v1.1.1 h1:4HaXUp8o2+bhHr1OhVy+VjN0+L7/07JDcn6v7YrTjrQ= github.com/chi-middleware/proxy v1.1.1/go.mod h1:jQwMEJct2tz9VmtCELxvnXoMfa+SOdikvbVJVHv/M+0= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= -github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= @@ -220,10 +209,8 @@ github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8Nz github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod 
h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/couchbase/ghistogram v0.1.0/go.mod h1:s1Jhy76zqfEecpNWJfWUiKZookAFaiGOEoyzgHt9i7k= github.com/couchbase/go-couchbase v0.0.0-20201026062457-7b3be89bbd89/go.mod h1:+/bddYDxXsf9qt0xpDUtRR47A2GjaXmGGAqQ/k3GJ8A= @@ -245,14 +232,13 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsr github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY= -github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/denisenkom/go-mssqldb v0.0.0-20200428022330-06a60b6afbbc/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/denisenkom/go-mssqldb v0.9.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/denisenkom/go-mssqldb v0.10.0 h1:QykgLZBorFE95+gO3u9esLd0BmbvpWp0/waNNZfHBM8= github.com/denisenkom/go-mssqldb v0.10.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= @@ -260,6 +246,11 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/djherbis/buffer v1.1.0/go.mod h1:VwN8VdFkMY0DCALdY8o00d3IZ6Amz/UNVMWcSaJT44o= +github.com/djherbis/buffer v1.2.0 h1:PH5Dd2ss0C7CRRhQCZ2u7MssF+No9ide8Ye71nPHcrQ= +github.com/djherbis/buffer v1.2.0/go.mod h1:fjnebbZjCUpPinBRD+TDwXSOeNQ7fPQWLfGQqiAiUyE= +github.com/djherbis/nio/v3 v3.0.1 h1:6wxhnuppteMa6RHA4L81Dq7ThkZH8SwnDzXDYy95vB4= +github.com/djherbis/nio/v3 v3.0.1/go.mod h1:Ng4h80pbZFMla1yKzm61cF0tqqilXZYrogmWgZxOcmg= github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= @@ -269,20 +260,15 @@ github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= 
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= -github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= -github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= -github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/dvyukov/go-fuzz v0.0.0-20210429054444-fca39067bc72/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw= github.com/editorconfig/editorconfig-core-go/v2 v2.4.2 h1:1lkDpSoAaFLrgYTVJ/eNCV+lkDSv/j9Wm0jcvDfVVEo= github.com/editorconfig/editorconfig-core-go/v2 v2.4.2/go.mod h1:IXeWRVO4LZRoNunhHh/oP6BQvTs94nB2pNvbw32l8tQ= github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= -github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o= -github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= +github.com/elazarl/go-bindata-assetfs v1.0.1/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4= github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= -github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -292,24 +278,18 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/ethantkoenig/rupture v1.0.0 h1:gPInt1N30UErGNzd8t5js5Qbnpjcd1l6yU2MCrJxIe8= github.com/ethantkoenig/rupture v1.0.0/go.mod h1:GyE9QabHfxA6ch0NZgwsHopRbOLcYjUr9g4FTJmq0WM= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg= -github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= -github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ= github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= -github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod 
h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= -github.com/gliderlabs/ssh v0.3.2 h1:gcfd1Aj/9RQxvygu4l3sak711f/5+VOwBw9C/7+N4EI= -github.com/gliderlabs/ssh v0.3.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= +github.com/gliderlabs/ssh v0.3.3 h1:mBQ8NiOgDkINJrZtoizkC3nDNYgSaWtxyem6S2XHBtA= +github.com/gliderlabs/ssh v0.3.3/go.mod h1:ZSS+CUoKHDrqVakTfTWUlKSr9MtMFkC4UvtQKD7O914= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= @@ -324,25 +304,25 @@ github.com/go-chi/chi/v5 v5.0.1 h1:ALxjCrTf1aflOlkhMnCUP86MubbWFrzB3gkRPReLpTo= github.com/go-chi/chi/v5 v5.0.1/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= github.com/go-chi/cors v1.2.0 h1:tV1g1XENQ8ku4Bq3K9ub2AtgG+p16SmzeMSGTwrOKdE= github.com/go-chi/cors v1.2.0/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= -github.com/go-enry/go-enry/v2 v2.6.1 h1:ckFkMVj2NeHpaQDFDiSjanVjNy2IiuMNivhXDB4c5Q0= -github.com/go-enry/go-enry/v2 v2.6.1/go.mod h1:GVzIiAytiS5uT/QiuakK7TF1u4xDab87Y8V5EJRpsIQ= +github.com/go-enry/go-enry/v2 v2.7.1 h1:WCqtfyteIz61GYk9lRVy8HblvIv4cP9GIiwm/6txCbU= +github.com/go-enry/go-enry/v2 v2.7.1/go.mod h1:GVzIiAytiS5uT/QiuakK7TF1u4xDab87Y8V5EJRpsIQ= github.com/go-enry/go-oniguruma v1.2.1 h1:k8aAMuJfMrqm/56SG2lV9Cfti6tC4x8673aHCcBk+eo= github.com/go-enry/go-oniguruma v1.2.1/go.mod h1:bWDhYP+S6xZQgiRL7wlTScFYBe023B6ilRZbCAD5Hf4= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-billy/v5 v5.1.0 h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk= -github.com/go-git/go-billy/v5 v5.1.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M= -github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= -github.com/go-git/go-git/v5 v5.3.0 h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc= -github.com/go-git/go-git/v5 v5.3.0/go.mod h1:xdX4bWJ48aOrdhnl2XqHYstHbbp6+LFS4r4X+lNVprw= +github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= +github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34= +github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= +github.com/go-git/go-git-fixtures/v4 v4.2.1 h1:n9gGL1Ct/yIw+nfsfr8s4+sbhT+Ncu2SubfXjIWgci8= +github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0= +github.com/go-git/go-git/v5 v5.4.3-0.20210630082519-b4368b2a2ca4 h1:1RSUwVK7VjTeA82kcLIqz1EU70QRwFdZUlJW58gP4GY= +github.com/go-git/go-git/v5 
v5.4.3-0.20210630082519-b4368b2a2ca4/go.mod h1:gQ1kArt6d+n+BGd+/B/I74HwRTLhth2+zti4ihgckDc= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-ldap/ldap/v3 v3.3.0 h1:lwx+SJpgOHd8tG6SumBQZXCmNX51zM8B1cfxJ5gv4tQ= github.com/go-ldap/ldap/v3 v3.3.0/go.mod h1:iYS1MdmrmceOJ1QOTnRXrIs7i3kloqtmGQjRvjKpyMg= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= @@ -446,9 +426,8 @@ github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZ github.com/go-redis/redis v6.15.2+incompatible h1:9SpNVG76gr6InJGxoZ6IuuxaCOQwDAhzyXg+Bs+0Sb4= github.com/go-redis/redis v6.15.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-redis/redis/v8 v8.4.0/go.mod h1:A1tbYoHSa1fXwN+//ljcCYYJeLmVrwL9hbQN45Jdy0M= -github.com/go-redis/redis/v8 v8.8.2 h1:O/NcHqobw7SEptA0yA6up6spZVFtwE06SXM8rgLtsP8= -github.com/go-redis/redis/v8 v8.8.2/go.mod h1:F7resOH5Kdug49Otu24RjHWwgK7u9AmtqWMnCV1iP5Y= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-redis/redis/v8 v8.11.0 h1:O1Td0mQ8UFChQ3N9zFQqo6kTU2cJ+/it88gDB+zg0wo= +github.com/go-redis/redis/v8 v8.11.0/go.mod h1:DLomh7y2e3ggQXQLd1YgmvIfecPJoFl7WU5SOQ/r06M= github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= @@ -459,8 +438,8 @@ github.com/go-swagger/go-swagger v0.27.0 h1:K7+nkBuf4oS1jTBrdvWqYFpqD69V5CN8HamZ github.com/go-swagger/go-swagger v0.27.0/go.mod h1:WodZVysInJilkW7e6IRw+dZGp5yW6rlMFZ4cb+THl9A= github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013 h1:l9rI6sNaZgNC0LnF3MiE+qTmyBA/tZAg1rtyrGbUMK0= github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013/go.mod h1:b65mBPzqzZWxOZGxSWrqs4GInLIn+u99Q9q7p+GKni0= -github.com/go-testfixtures/testfixtures/v3 v3.6.0 h1:fHrJWcZ0TOHA0UcExV0Nwx+5MR9QXVDWYdVfwe4DfmM= -github.com/go-testfixtures/testfixtures/v3 v3.6.0/go.mod h1:YUBpgqvleDRhkx4MQbzdA7A3G5ca2wLtf9bHbDqNaRQ= +github.com/go-testfixtures/testfixtures/v3 v3.6.1 h1:n4Fv95Exp0D05G6l6CAZv22Ck1EJK0pa0TfPqE4ncSs= +github.com/go-testfixtures/testfixtures/v3 v3.6.1/go.mod h1:Bsb2MoHAfHnNsPpSwAjtOs102mqDuM+1u3nE2OCi0N0= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= @@ -489,9 +468,7 @@ github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gofrs/uuid 
v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE= github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 h1:gBeyun7mySAKWg7Fb0GOcv0upX9bdaZScs8QcRo8mEY= github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14= @@ -502,7 +479,6 @@ github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85/go.mod h1:fR6z github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -534,8 +510,9 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -548,8 +525,9 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-github/v32 v32.1.0 h1:GWkQOdXqviCPx7Q7Fj+KyPoGm4SwHRh8rheoPhd27II= github.com/google/go-github/v32 v32.1.0/go.mod h1:rIEpZD9CTDQwDK9GDrtMTycQNA4JU3qBsCizh3q2WCI= 
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= @@ -584,14 +562,11 @@ github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKp github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= -github.com/gorilla/csrf v1.6.0/go.mod h1:7tSf8kmjNYr7IWDCYhd3U8Ck34iQ/Yw5CJu7bAkHEGI= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= -github.com/gorilla/handlers v1.4.1/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1 h1:LqbZZ9sNMWVjeXS4NN5oVvhMjDyLhmA1LG86oSo+IqY= @@ -602,17 +577,12 @@ github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= @@ -622,8 +592,9 @@ github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrj github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= 
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4= +github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= @@ -632,6 +603,8 @@ github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= @@ -641,13 +614,11 @@ github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/J github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/issue9/assert v1.4.1 h1:gUtOpMTeaE4JTe9kACma5foOHBvVt1p5XTFrULDwdXI= github.com/issue9/assert v1.4.1/go.mod h1:Yktk83hAVl1SPSYtd9kjhBizuiBIqUQyj+D5SE2yjVY= github.com/issue9/identicon v1.2.0 h1:ek+UcTTyMW/G0iNbLOAlrPC13eSzXTWhbJSs8PHhHGQ= @@ -698,10 +669,8 @@ github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7 h1:g0fAGBisHaE github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jessevdk/go-flags v1.5.0 
h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= -github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= @@ -711,10 +680,9 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -737,16 +705,17 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.12.1 h1:/+xsCsk06wE38cyiqOR/o7U2fSftcH72xD+BQXmja/g= -github.com/klauspost/compress v1.12.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.1 h1:wXr2uRxZTJXHLly6qhJabee5JqIhTRoLBhDOA74hDEQ= +github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/cpuid v1.2.5/go.mod h1:bYW4mA6ZgKPob1/Dlai2LviZJO7KGI3uoWLd42rAQw4= github.com/klauspost/cpuid v1.3.1 h1:5JNjFYYQrZeKRJ0734q51WCEEn2huer72Dc7K+R/b6s= github.com/klauspost/cpuid v1.3.1/go.mod h1:bYW4mA6ZgKPob1/Dlai2LviZJO7KGI3uoWLd42rAQw4= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.0.4 h1:g0I61F2K2DjRHz1cnxlkNSBIaePVoJIjjnHui8QHbiw= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.6/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.8 h1:bhR2mgIlno/Sfk4oUbH4sPlc83z1yGrN9bvqiq3C33I= +github.com/klauspost/cpuid/v2 v2.0.8/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/pgzip v1.2.4/go.mod 
h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= @@ -772,18 +741,14 @@ github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.7.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.1 h1:6VXZrLU0jHBYyAqrSPa+MgPfnSvTPuMgK+k0o5kVFWo= -github.com/lib/pq v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/libdns/libdns v0.2.0 h1:ewg3ByWrdUrxrje8ChPVMBNcotg7H9LQYg+u5De2RzI= -github.com/libdns/libdns v0.2.0/go.mod h1:yQCXzk1lEZmmCPa857bnk4TsOiqYasqpyOEeSObbb40= -github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= -github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/libdns/libdns v0.2.1 h1:Wu59T7wSHRgtA0cfxC+n1c/e+O3upJGWytknkmFEDis= +github.com/libdns/libdns v0.2.1/go.mod h1:yQCXzk1lEZmmCPa857bnk4TsOiqYasqpyOEeSObbb40= github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96 h1:uNwtsDp7ci48vBTTxDuwcoTXz4lwtDTe7TjCQ0noaWY= github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96/go.mod h1:mmIfjCSQlGYXmJ95jFN84AkQFnVABtKuJL8IrzwvUKQ= github.com/lunny/log v0.0.0-20160921050905-7887c61bf0de/go.mod h1:3q8WtuPQsoRbatJuy3nvq/hRSvuBJrHHr+ybPPiNvHQ= github.com/lunny/nodb v0.0.0-20160621015157-fc1ef06ad4af/go.mod h1:Cqz6pqow14VObJ7peltM+2n3PWOz7yTrfUuGbVFkzN0= -github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= @@ -801,49 +766,46 @@ github.com/markbates/goth v1.67.1 h1:gU5B0pzHVyhnJPwGynfFnkfvaQ39C1Sy+ewdl+bhAOw github.com/markbates/goth v1.67.1/go.mod h1:EyLFHGU5ySr2GXRDyJH5nu2dA7parbC8QwIYW/rGcWg= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A= +github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod 
h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA= +github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.12 h1:Y41i/hVW3Pgwr8gV+J23B9YEY0zxjptBuCWEaxmAOow= -github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= +github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.7 h1:fxWBnXkxfM6sRiuH3bqJ4CfzZojMOLVc0UTsTglEghA= github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81 h1:QASJXOGm2RZ5Ardbc86qNFvby9AqkLDibfChMtAg5QM= -github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= -github.com/mgechev/revive v1.0.6 h1:MgRQ3ys2uQCyVjelaDhVs8oSvOPYInzGA/nNGMa+MNU= -github.com/mgechev/revive v1.0.6/go.mod h1:Lj5gIVxjBlH8REa3icEOkdfchwYc291nShzZ4QYWyMo= github.com/mholt/acmez v0.1.3 h1:J7MmNIk4Qf9b8mAGqAh4XkNeowv3f1zW816yf4zt7Qk= github.com/mholt/acmez v0.1.3/go.mod h1:8qnn8QA/Ewx8E3ZSsmscqsIjhhpxuy9vqdgbX2ceceM= github.com/mholt/archiver/v3 v3.5.0 h1:nE8gZIrw66cu4osS/U7UW7YDuGMHssxKutU8IfWxwWE= github.com/mholt/archiver/v3 v3.5.0/go.mod h1:qqTTPUK/HZPFgFQ/TJ3BzvTpF/dPtFVJXdQbCmeMxwc= -github.com/microcosm-cc/bluemonday v1.0.8 h1:JGc6zQRHqlp+UlLrsbUbbp0mOaJLV44vvQmBSU0Sfj0= -github.com/microcosm-cc/bluemonday v1.0.8/go.mod h1:HOT/6NaBlR0f9XlxD3zolN6Z3N8Lp4pvhp+jLS5ihnI= +github.com/microcosm-cc/bluemonday v1.0.15 h1:J4uN+qPng9rvkBZBoBb8YGR+ijuklIMpSOZZLjYpbeY= +github.com/microcosm-cc/bluemonday v1.0.15/go.mod h1:ZLvAzeakRwrGnzQEvstVzVt3ZpqOF2+sdFr0Om+ce30= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.30/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= -github.com/miekg/dns v1.1.40 h1:pyyPFfGMnciYUk/mXpKkVmeMQjfXqt3FAJ2hy7tPiLA= -github.com/miekg/dns v1.1.40/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/miekg/dns v1.1.42/go.mod 
h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= +github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg= +github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= -github.com/minio/minio-go/v7 v7.0.10 h1:1oUKe4EOPUEhw2qnPQaPsJ0lmVTYLFu03SiItauXs94= -github.com/minio/minio-go/v7 v7.0.10/go.mod h1:td4gW1ldOsj1PbSNS+WYK43j+P1XVhX/8W8awaYlBFo= +github.com/minio/minio-go/v7 v7.0.12 h1:/4pxUdwn9w0QEryNkrrWaodIESPRX+NxpO0Q6hVdaAA= +github.com/minio/minio-go/v7 v7.0.12/go.mod h1:S23iSP5/gbMwtxeY5FM71R+TkAYyzEdoNEDDwpt8yWs= github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g= github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= @@ -878,34 +840,23 @@ github.com/msteinert/pam v0.0.0-20201130170657-e61372126161 h1:XQ1+fYPzaWZCVdu1x github.com/msteinert/pam v0.0.0-20201130170657-e61372126161/go.mod h1:np1wUFZ6tyoke22qDJZY40URn9Ae51gX7ljIWXN5TJs= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= -github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= -github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= -github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= -github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/niklasfasching/go-org v1.5.0 h1:V8IwoSPm/d61bceyWFxxnQLtlvNT+CjiYIhtZLdnMF0= github.com/niklasfasching/go-org v1.5.0/go.mod h1:sSb8ylwnAG+h8MGFDB3R1D5bxf8wA08REfhjShg3kjA= -github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E= github.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ= github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/olekukonko/tablewriter 
v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k= github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU= -github.com/olivere/elastic/v7 v7.0.24 h1:9ZcCQP3Pvgese7TaypYiVAL49sCEphyIwkVxtRf8jb8= -github.com/olivere/elastic/v7 v7.0.24/go.mod h1:OuWmD2DiuYhddWegBKPWQuelVKBLrW0fa/VUYgxuGTY= +github.com/olivere/elastic/v7 v7.0.25 h1:q3ef8PqC4PyT3b8BAcjDVo48KNzr0HVKosMqMsF+oME= +github.com/olivere/elastic/v7 v7.0.25/go.mod h1:ySKeM+7yrE9HmsUi6+vSp0anvWiDOuPa9kpuknxjKbU= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -920,17 +871,7 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= github.com/onsi/gomega v1.10.5 h1:7n6FEkpFmfCoo2t+YYqXH0evK+a9ICQz0xcAy9dYcaQ= github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= -github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= -github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= -github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= -github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= -github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= -github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= -github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= -github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= -github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= @@ -939,19 +880,14 @@ github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAv github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/pelletier/go-toml v1.9.0 h1:NOd0BRdOKpPf0SxkL3HxSQOG7rNh+4kl6PHcBPFs7Q0= github.com/pelletier/go-toml v1.9.0/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= -github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= 
-github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= -github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.0.3/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pierrec/lz4/v4 v4.1.3 h1:/dvQpkb0o1pVlSgKNQqfkavlnXaIK+hJ0LXsKRUN9D4= -github.com/pierrec/lz4/v4 v4.1.3/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4= +github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -960,51 +896,45 @@ github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac/go.mod h1:hoL github.com/pquerna/otp v1.3.0 h1:oJV/SkzR33anKXwQU3Of42rL4wbrffP4uvUf1SvS5Xs= github.com/pquerna/otp v1.3.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.10.0 h1:/o0BDeWzLWXNZ+4q5gXltUvaMpJqckTa+jTNoB+z4cg= -github.com/prometheus/client_golang v1.10.0/go.mod h1:WJM3cc3yu7XKBKa/I8WeZm+V3eltZnBwfENSU7mdogU= +github.com/prometheus/client_golang v1.11.0 h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.2.0/go.mod 
h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.18.0 h1:WCVKW7aL6LEe1uryfI9dnEc2ZqNB1Fn0ok930v0iL1Y= -github.com/prometheus/common v0.18.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/quasoft/websspi v1.0.0 h1:5nDgdM5xSur9s+B5w2xQ5kxf5nUGqgFgU4W0aDLZ8Mw= github.com/quasoft/websspi v1.0.0/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk= -github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/xid v1.3.0 h1:6NjYksEUlhurdVehpc7S7dk6DAmcKv8V9gG0FsVN2U4= +github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.13.0/go.mod 
h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= @@ -1013,7 +943,6 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -1037,6 +966,8 @@ github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMB github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck= @@ -1048,7 +979,6 @@ github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIK github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/gunit v1.4.2/go.mod h1:ZjM1ozSIMJlAz/ay4SG8PeKF00ckUp+zMHZXV9/bvak= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= @@ -1062,7 +992,6 @@ github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSW github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= @@ -1072,11 +1001,9 @@ github.com/spf13/viper v1.7.1 h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk= github.com/spf13/viper v1.7.1/go.mod 
h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo= github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM= +github.com/stephens2424/writerset v1.0.2/go.mod h1:aS2JhsMn6eA7e82oNmW4rfsgAOp9COBTTl8mzkwADnc= github.com/steveyen/gtreap v0.1.0 h1:CjhzTa274PyJLJuMZwIzCO1PfC00oRa8d1Kc78bFXJM= github.com/steveyen/gtreap v0.1.0/go.mod h1:kl/5J7XbrOmlIbYIXdRHDDE5QxHqpk0cmkT7Z4dM9/Y= -github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= -github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= -github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= @@ -1094,7 +1021,6 @@ github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpP github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= -github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ= github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= @@ -1112,20 +1038,14 @@ github.com/unknwon/i18n v0.0.0-20210321134014-0ebbf2df1c44 h1:7bSo/vjZKVYUoZfxpY github.com/unknwon/i18n v0.0.0-20210321134014-0ebbf2df1c44/go.mod h1:+5rDk6sDGpl3azws3O+f+GpFSyN9GVr0K8cvQLQM2ZQ= github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae h1:ihaXiJkaca54IaCSnEXtE/uSZOmPxKZhDfVLrzZLFDs= github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae/go.mod h1:1fdkY6xxl6ExVs2QFv7R0F5IRZHKA8RahhB9fMC9RvM= -github.com/unrolled/render v1.1.0 h1:gvpR9hHxTt6DcGqRYuVVFcfd8rtK+nyEPUJN06KB57Q= -github.com/unrolled/render v1.1.0/go.mod h1:gN9T0NhL4Bfbwu8ann7Ry/TGHYfosul+J0obPf6NBdM= -github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/unrolled/render v1.4.0 h1:p73obhpsXuE3paXOtcuXTBKgBJpLCfmABnsUiO35x+Q= +github.com/unrolled/render v1.4.0/go.mod h1:cK4RSTTVdND5j9EYEc0LAMOvdG11JeiKjyjfyZRvV2w= github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= -github.com/willf/bitset v1.1.11 h1:N7Z7E9UvjW+sGsEl7k/SJrvY2reP1A07MrGuCjIOjRE= -github.com/willf/bitset v1.1.11/go.mod 
h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= -github.com/xanzy/go-gitlab v0.48.0 h1:RP9r4pMDIwE2fbtc+QYiC1euDsPGHcAjPkhje4X3QPU= -github.com/xanzy/go-gitlab v0.48.0/go.mod h1:UW8JJbyBbqtOyBYNHRo261IRdHUFJr2m0y0z1xUiu+E= +github.com/xanzy/go-gitlab v0.50.1 h1:eH1G0/ZV1j81rhGrtbcePjbM5Ern7mPA4Xjt+yE+2PQ= +github.com/xanzy/go-gitlab v0.50.1/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE= github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= @@ -1140,24 +1060,23 @@ github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1: github.com/yohcop/openid-go v1.0.0 h1:EciJ7ZLETHR3wOtxBvKXx9RV6eyHZpCaSZ1inbBaUXE= github.com/yohcop/openid-go v1.0.0/go.mod h1:/408xiwkeItSPJZSTPF7+VtZxPkPrRRpRNK2vjGh6yI= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= -github.com/yuin/goldmark v1.1.22/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5 h1:dPmz1Snjq0kmkz159iL7S6WzdahUTHnHB5M56WFVifs= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691 h1:VWSxtAiQNh3zgHJpdpkpVYjTPqRE3P6UZCOPa1nRDio= -github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691/go.mod h1:YLF3kDffRfUH/bTxOxHhV6lxwIB3Vfj91rEwNMS9MXo= +github.com/yuin/goldmark v1.3.6/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0 h1:OtISOGfH6sOWa1/qXqqAiOIAO6Z5J3AEAE18WAq6BiQ= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark-highlighting v0.0.0-20210516132338-9216f9c5aa01 h1:0SJnXjE4jDClMW6grE0xpNhwpqbPwkBTn8zpVw5C0SI= +github.com/yuin/goldmark-highlighting v0.0.0-20210516132338-9216f9c5aa01/go.mod h1:TwKQPa5XkCCRC2GRZ5wtfNUTQ2+9/i19mGRijFeJ4BE= github.com/yuin/goldmark-meta v1.0.0 h1:ScsatUIT2gFS6azqzLGUjgOnELsBOxMXerM3ogdJhAM= github.com/yuin/goldmark-meta v1.0.0/go.mod h1:zsNNOrZ4nLuyHAJeLQEZcQat8dm70SmB2kHbls092Gc= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU= +go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= go.jolheiser.com/hcaptcha v0.0.4 h1:RrDERcr/Tz/kWyJenjVtI+V09RtLinXxlAemiwN5F+I= go.jolheiser.com/hcaptcha v0.0.4/go.mod h1:aw32WQOxnQZ6E06C0LypCf+sxNxPACyOnq+ZGnrIYho= go.jolheiser.com/pwn v0.0.3 
h1:MQowb3QvCL5r5NmHmCPxw93SdjfgJ0q6rAwYn4i1Hjg= @@ -1171,8 +1090,6 @@ go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.5.1 h1:9nOVLGDfOaZ9R0tBumx/BcuqkbFpyTCU2r/Po7A2azI= go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= -go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= -go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -1181,32 +1098,26 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/otel v0.14.0/go.mod h1:vH5xEuwy7Rts0GNtsCW3HYQoZDY+OmBJ6t1bFGGlxgw= -go.opentelemetry.io/otel v0.19.0 h1:Lenfy7QHRXPZVsw/12CWpxX6d/JkrX8wrx2vO8G80Ng= -go.opentelemetry.io/otel v0.19.0/go.mod h1:j9bF567N9EfomkSidSfmMwIwIBuP37AMAIzVW85OxSg= -go.opentelemetry.io/otel/metric v0.19.0 h1:dtZ1Ju44gkJkYvo+3qGqVXmf88tc+a42edOywypengg= -go.opentelemetry.io/otel/metric v0.19.0/go.mod h1:8f9fglJPRnXuskQmKpnad31lcLJ2VmNNqIsx/uIwBSc= -go.opentelemetry.io/otel/oteltest v0.19.0 h1:YVfA0ByROYqTwOxqHVZYZExzEpfZor+MU1rU+ip2v9Q= -go.opentelemetry.io/otel/oteltest v0.19.0/go.mod h1:tI4yxwh8U21v7JD6R3BcA/2+RBoTKFexE/PJ/nSO7IA= -go.opentelemetry.io/otel/trace v0.19.0 h1:1ucYlenXIDA1OlHVLDZKX0ObXV5RLaq06DtUKz5e5zc= -go.opentelemetry.io/otel/trace v0.19.0/go.mod h1:4IXiNextNOpPnRlI4ryK69mn5iC84bjBWZQA5DXz/qg= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.8.0 h1:CUhrE4N1rqSE6FM9ecihEjRkLQu8cDfgDyoOs83mEY4= +go.uber.org/atomic v1.8.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.10 h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= -go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap 
v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= -go.uber.org/zap v1.16.0 h1:uFRZXykJGK9lLY4HtgSw44DnIcAM+kRBP7x5m+NpAOM= -go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.18.1 h1:CSUJ2mjFszzEWt4CdKISEuChVIXGBn3lAPwkRGyVrc4= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1221,7 +1132,6 @@ golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1229,11 +1139,12 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b h1:7mWr3k41Qtv8XlltBkDkl8LoP3mpSgBW8BUoxtEdbXg= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e h1:gsTQYXdTw2Gq7RBsWvlQ91b+aEQ6bXFUngBGuR8sPpI= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1276,12 +1187,10 @@ golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -1296,7 +1205,6 @@ golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -1326,9 +1234,9 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20210331060903-cb1fcc7394e5/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210421230115-4e50805a0758 h1:aEpZnXcAmXkd6AvLb2OPt+EN1Zu/8Ne3pCqPjja5PXY= -golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ 
-1340,8 +1248,8 @@ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78 h1:rPRtHfUb0UKZeZ6GH4K4Nt4YRbE9V1u+QZX5upZXqJQ= -golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1352,8 +1260,9 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1361,7 +1270,6 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1385,14 +1293,12 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1414,21 +1320,29 @@ golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210421221651-33663a62ff08 h1:qyN5bV+96OX8pL78eXDuz6YlDPzCYgdW74H5yE9BoSU= -golang.org/x/sys 
v0.0.0-20210421221651-33663a62ff08/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1440,14 +1354,12 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6 h1:Vv0JUPWTyeqUq42B2WJ1FeIDjjvGKoA2Ss+Ts0lAVbs= +golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1476,16 +1388,15 @@ golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 
+golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1509,6 +1420,7 @@ golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82u golang.org/x/tools v0.0.0-20200928182047-19e03678916f/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20200929161345-d7fc70abf50f/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -1522,7 +1434,6 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -1544,7 +1455,6 @@ google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine 
v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1558,7 +1468,6 @@ google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRn google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= @@ -1590,15 +1499,10 @@ google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= @@ -1623,8 +1527,9 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod 
h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= @@ -1634,10 +1539,8 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE= gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= @@ -1667,27 +1570,51 @@ gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009 h1:u0oCo5b9wyLr++HF3AN9JicGhkUxJhMz51+8TIZH9N0= +modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009/go.mod h1:0R6jl1aZlIl2avnYfbfHBS1QB6/f+16mihBObaBC878= +modernc.org/ccgo/v3 v3.9.0 h1:JbcEIqjw4Agf+0g3Tc85YvfYqkkFOv6xBwS4zkfqSoA= +modernc.org/ccgo/v3 v3.9.0/go.mod h1:nQbgkn8mwzPdp4mm6BT6+p85ugQ7FrGgIcYaE7nSrpY= +modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM= +modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= +modernc.org/libc v1.7.13-0.20210308123627-12f642a52bb8/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w= +modernc.org/libc v1.8.0 h1:Pp4uv9g0csgBMpGPABKtkieF6O5MGhfGo6ZiOdlYfR8= +modernc.org/libc v1.8.0/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w= +modernc.org/mathutil v1.1.1/go.mod 
h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.2.2 h1:+yFk8hBprV+4c0U9GjFtL+dV3N8hOJ8JCituQcMShFY= +modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/memory v1.0.4 h1:utMBrFcpnQDdNsmM6asmyH/FM9TqLPS7XF7otpJmrwM= +modernc.org/memory v1.0.4/go.mod h1:nV2OApxradM3/OVbs2/0OsP6nPfakXpi50C7dcoHXlc= +modernc.org/opt v0.1.1 h1:/0RX92k9vwVeDXj+Xn23DKp2VJubL7k8qNffND6qn3A= +modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84 h1:rgEUzE849tFlHSoeCrKyS9cZAljC+DY7MdMHKq6R6sY= +modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84/go.mod h1:PGzq6qlhyYjL6uVbSgS6WoF7ZopTW/sI7+7p+mb4ZVU= +modernc.org/strutil v1.1.0 h1:+1/yCzZxY2pZwwrsbH+4T7BQMoLQ9QiBshRC9eicYsc= +modernc.org/strutil v1.1.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs= +modernc.org/tcl v1.5.0 h1:euZSUNfE0Fd4W8VqXI1Ly1v7fqDJoBuAV88Ea+SnaSs= +modernc.org/tcl v1.5.0/go.mod h1:gb57hj4pO8fRrK54zveIfFXBaMHK3SKJNWcmRw1cRzc= +modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk= +modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= +modernc.org/z v1.0.1-0.20210308123920-1f282aa71362/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= +modernc.org/z v1.0.1 h1:WyIDpEpAIx4Hel6q/Pcgj/VhaQV5XPJ2I6ryIYbjnpc= +modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= mvdan.cc/xurls/v2 v2.2.0 h1:NSZPykBXJFCetGZykLAxaL6SIpvbVy/UFEniIfHAa8A= mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs= strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY= xorm.io/builder v0.3.7/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE= +xorm.io/builder v0.3.8/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE= xorm.io/builder v0.3.9 h1:Sd65/LdWyO7LR8+Cbd+e7mm3sK/7U9k0jS3999IDHMc= xorm.io/builder v0.3.9/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE= xorm.io/xorm v1.0.6/go.mod h1:uF9EtbhODq5kNWxMbnBEj8hRRZnlcNSz2t2N7HW/+A4= -xorm.io/xorm v1.0.7 h1:26yBTDVI+CfQpVz2Y88fISh+aiJXIPP4eNoTJlwzsC4= -xorm.io/xorm v1.0.7/go.mod h1:uF9EtbhODq5kNWxMbnBEj8hRRZnlcNSz2t2N7HW/+A4= +xorm.io/xorm v1.1.2 h1:bje+1KZvK3m5AHtZNfUDlKEEyuw/IRHT+an0CLIG5TU= +xorm.io/xorm v1.1.2/go.mod h1:Cb0DKYTHbyECMaSfgRnIZp5aiUgQozxcJJ0vzcLGJSg= diff --git a/integrations/README.md b/integrations/README.md index ccb55377f56e..0b91a7d254a1 100644 --- a/integrations/README.md +++ b/integrations/README.md @@ -28,7 +28,7 @@ make test-sqlite Setup a mysql database inside docker ``` docker run -e "MYSQL_DATABASE=test" -e "MYSQL_ALLOW_EMPTY_PASSWORD=yes" -p 3306:3306 --rm --name mysql mysql:latest #(just ctrl-c to stop db and clean the container) -docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" --rm --name elasticsearch elasticsearch:7.6.0 #(in a secound terminal, just ctrl-c to stop db and clean the 
container) +docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" --rm --name elasticsearch elasticsearch:7.6.0 #(in a second terminal, just ctrl-c to stop db and clean the container) ``` Start tests based on the database container ``` diff --git a/integrations/README_ZH.md b/integrations/README_ZH.md index fd317645603a..39639f9b89ad 100644 --- a/integrations/README_ZH.md +++ b/integrations/README_ZH.md @@ -26,7 +26,7 @@ make test-sqlite ## 如何使用 mysql 数据库进行集成测试 首先在docker容器里部署一个 mysql 数据库 ``` -docker run -e "MYSQL_DATABASE=test" -e "MYSQL_ALLOW_EMPTY_PASSWORD=yes" -p 3306:3306 --rm --name mysql mysql:5.7 #(just ctrl-c to stop db and clean the container) +docker run -e "MYSQL_DATABASE=test" -e "MYSQL_ALLOW_EMPTY_PASSWORD=yes" -p 3306:3306 --rm --name mysql mysql:8 #(just ctrl-c to stop db and clean the container) ``` 之后便可以基于这个数据库进行集成测试 ``` @@ -36,7 +36,7 @@ TEST_MYSQL_HOST=localhost:3306 TEST_MYSQL_DBNAME=test TEST_MYSQL_USERNAME=root T ## 如何使用 pgsql 数据库进行集成测试 同上,首先在 docker 容器里部署一个 pgsql 数据库 ``` -docker run -e "POSTGRES_DB=test" -p 5432:5432 --rm --name pgsql postgres:9.5 #(just ctrl-c to stop db and clean the container) +docker run -e "POSTGRES_DB=test" -p 5432:5432 --rm --name pgsql postgres:13 #(just ctrl-c to stop db and clean the container) ``` 之后便可以基于这个数据库进行集成测试 ``` diff --git a/integrations/api_admin_test.go b/integrations/api_admin_test.go index b3a0de36d369..b93179d21df0 100644 --- a/integrations/api_admin_test.go +++ b/integrations/api_admin_test.go @@ -195,7 +195,7 @@ func TestAPIEditUser(t *testing.T) { assert.EqualValues(t, "email is not allowed to be empty string", errMap["message"].(string)) user2 := models.AssertExistsAndLoadBean(t, &models.User{LoginName: "user2"}).(*models.User) - assert.Equal(t, false, user2.IsRestricted) + assert.False(t, user2.IsRestricted) bTrue := true req = NewRequestWithJSON(t, "PATCH", urlStr, api.EditUserOption{ // required @@ -206,5 +206,5 @@ func TestAPIEditUser(t *testing.T) { }) session.MakeRequest(t, req, http.StatusOK) user2 = models.AssertExistsAndLoadBean(t, &models.User{LoginName: "user2"}).(*models.User) - assert.Equal(t, true, user2.IsRestricted) + assert.True(t, user2.IsRestricted) } diff --git a/integrations/api_gpg_keys_test.go b/integrations/api_gpg_keys_test.go index e664c3c25655..8fc4124a4898 100644 --- a/integrations/api_gpg_keys_test.go +++ b/integrations/api_gpg_keys_test.go @@ -29,14 +29,13 @@ func TestGPGKeys(t *testing.T) { results []int }{ {name: "NoLogin", makeRequest: MakeRequest, token: "", - results: []int{http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized}, + results: []int{http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized, http.StatusUnauthorized}, }, {name: "LoggedAsUser2", makeRequest: session.MakeRequest, token: token, - results: []int{http.StatusOK, http.StatusOK, http.StatusNotFound, http.StatusNoContent, http.StatusUnprocessableEntity, http.StatusNotFound, http.StatusCreated, http.StatusCreated}}, + results: []int{http.StatusOK, http.StatusOK, http.StatusNotFound, http.StatusNoContent, http.StatusUnprocessableEntity, http.StatusNotFound, http.StatusCreated, http.StatusNotFound, http.StatusCreated}}, } for _, tc := range tt { - //Basic test on result code t.Run(tc.name, func(t *testing.T) { 
t.Run("ViewOwnGPGKeys", func(t *testing.T) { @@ -61,7 +60,7 @@ func TestGPGKeys(t *testing.T) { t.Run("CreateValidGPGKey", func(t *testing.T) { testCreateValidGPGKey(t, tc.makeRequest, tc.token, tc.results[6]) }) - t.Run("CreateValidSecondaryEmailGPGKey", func(t *testing.T) { + t.Run("CreateValidSecondaryEmailGPGKeyNotActivated", func(t *testing.T) { testCreateValidSecondaryEmailGPGKey(t, tc.makeRequest, tc.token, tc.results[7]) }) }) @@ -75,46 +74,32 @@ func TestGPGKeys(t *testing.T) { req := NewRequest(t, "GET", "/api/v1/user/gpg_keys?token="+token) //GET all keys resp := session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &keys) + assert.Len(t, keys, 1) primaryKey1 := keys[0] //Primary key 1 assert.EqualValues(t, "38EA3BCED732982C", primaryKey1.KeyID) - assert.EqualValues(t, 1, len(primaryKey1.Emails)) + assert.Len(t, primaryKey1.Emails, 1) assert.EqualValues(t, "user2@example.com", primaryKey1.Emails[0].Email) - assert.EqualValues(t, true, primaryKey1.Emails[0].Verified) + assert.True(t, primaryKey1.Emails[0].Verified) subKey := primaryKey1.SubsKey[0] //Subkey of 38EA3BCED732982C assert.EqualValues(t, "70D7C694D17D03AD", subKey.KeyID) - assert.EqualValues(t, 0, len(subKey.Emails)) - - primaryKey2 := keys[1] //Primary key 2 - assert.EqualValues(t, "FABF39739FE1E927", primaryKey2.KeyID) - assert.EqualValues(t, 1, len(primaryKey2.Emails)) - assert.EqualValues(t, "user21@example.com", primaryKey2.Emails[0].Email) - assert.EqualValues(t, false, primaryKey2.Emails[0].Verified) + assert.Empty(t, subKey.Emails) var key api.GPGKey req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey1.ID, 10)+"?token="+token) //Primary key 1 resp = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &key) assert.EqualValues(t, "38EA3BCED732982C", key.KeyID) - assert.EqualValues(t, 1, len(key.Emails)) + assert.Len(t, key.Emails, 1) assert.EqualValues(t, "user2@example.com", key.Emails[0].Email) - assert.EqualValues(t, true, key.Emails[0].Verified) + assert.True(t, key.Emails[0].Verified) req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(subKey.ID, 10)+"?token="+token) //Subkey of 38EA3BCED732982C resp = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &key) assert.EqualValues(t, "70D7C694D17D03AD", key.KeyID) - assert.EqualValues(t, 0, len(key.Emails)) - - req = NewRequest(t, "GET", "/api/v1/user/gpg_keys/"+strconv.FormatInt(primaryKey2.ID, 10)+"?token="+token) //Primary key 2 - resp = session.MakeRequest(t, req, http.StatusOK) - DecodeJSON(t, resp, &key) - assert.EqualValues(t, "FABF39739FE1E927", key.KeyID) - assert.EqualValues(t, 1, len(key.Emails)) - assert.EqualValues(t, "user21@example.com", key.Emails[0].Email) - assert.EqualValues(t, false, key.Emails[0].Verified) - + assert.Empty(t, key.Emails) }) //Check state after basic add @@ -124,7 +109,7 @@ func TestGPGKeys(t *testing.T) { req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/not-signed?token="+token) resp := session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &branch) - assert.EqualValues(t, false, branch.Commit.Verification.Verified) + assert.False(t, branch.Commit.Verification.Verified) }) t.Run("SignedWithNotValidatedEmail", func(t *testing.T) { @@ -132,7 +117,7 @@ func TestGPGKeys(t *testing.T) { req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign-not-yet-validated?token="+token) resp := session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &branch) - assert.EqualValues(t, false, 
branch.Commit.Verification.Verified) + assert.False(t, branch.Commit.Verification.Verified) }) t.Run("SignedWithValidEmail", func(t *testing.T) { @@ -140,7 +125,7 @@ func TestGPGKeys(t *testing.T) { req := NewRequest(t, "GET", "/api/v1/repos/user2/repo16/branches/good-sign?token="+token) resp := session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &branch) - assert.EqualValues(t, true, branch.Commit.Verification.Verified) + assert.True(t, branch.Commit.Verification.Verified) }) }) } @@ -231,35 +216,46 @@ uy6MA3VSB99SK9ducGmE1Jv8mcziREroz2TEGr0zPs6h } func testCreateValidSecondaryEmailGPGKey(t *testing.T, makeRequest makeRequestFunc, token string, expected int) { - //User2 //secondary and not activated + //User2 //secondary and not activated testCreateGPGKey(t, makeRequest, token, expected, `-----BEGIN PGP PUBLIC KEY BLOCK----- -mQENBFmGWN4BCAC18V4tVGO65VLCV7p14FuXJlUtZ5CuYMvgEkcOqrvRaBSW9ao4 -PGESOhJpfWpnW3QgJniYndLzPpsmdHEclEER6aZjiNgReWPOjHD5tykWocZAJqXD -eY1ym59gvVMLcfbV2yQsyR2hbJlc+dJsl16tigSEe3nwxZSw2IsW92pgEzT9JNUr -Q+mC8dw4dqY0tYmFazYUGNxufUc/twgQT/Or1aNs0az5Q6Jft4rrTRsh/S7We0VB -COKGkdcQyYgAls7HJBuPjQRi6DM9VhgBSHLAgSLyaUcZvhZBJr8Qe/q4PP3/kYDJ -wm4RMnjOLz2pFZPgtRqgcAwpmFtLrACbEB3JABEBAAG0GlVzZXIyIDx1c2VyMjFA -ZXhhbXBsZS5jb20+iQFUBBMBCAA+FiEEPOLHOjPSO42DWM57+r85c5/h6ScFAlmG -WN4CGwMFCQPCZwAFCwkIBwIGFQgJCgsCBBYCAwECHgECF4AACgkQ+r85c5/h6Sfx -Lgf/dq64NBV8+X9an3seaLxePRviva48e4K67/wV/JxtXNO5Z/DhMGz5kHXCsG9D -CXuWYO8ehlTjEnMZ6qqdDnY+H6bQsb2OS5oPn4RwpPXslAjEKtojPAr0dDsMS2DB -dUuIm1AoOnewOVO0OFRf1EqX1bivxnN0FVMcO0m8AczfnKDaGb0y/qg/Y9JAsKqp -j5pZNMWUkntRtGySeJ4CVJMmkVKJAHsa1Qj6MKdFeid4h4y94cBJ4ZdyBxNdpQOx -ydf0doicovfeqGNO4oWzsGP4RBK2CqGPCUT+EFl20jPvMkKwOjxgqc8p0z3b2UT9 -+9bnmCGHgF/fW1HJ3iKmfFPqnLkBDQRZhljeAQgA5AirU/NJGgm19ZJYFOiHftjS -azbrPxGeD3cSqmvDPIMc1DNZGfQV5D4EVumnVbQBtL6xHFoGKz9KisUMbe4a/X2J -S8JmIphQWG0vMJX1DaZIzr2gT71MnPD7JMGsSUCh5dIKpTNTZX4w+oGPGOu0/UlL -x0448AryKwp30J2p6D4GeI0nb03n35S2lTOpnHDn1wj7Jl/8LS2fdFOdNaNHXSZe -twdSwJKhyBEiScgeHBDyKqo8zWkYoSb9eA2HiYlbVaiNtp24KP1mIEpiUdrRjWno -zauYSZGHZlOFMgF4dKWuetPiuH9m7UYZGKyMLfQ9vYFb+xcPh2bLCQHJ1OEmMQAR -AQABiQE8BBgBCAAmFiEEPOLHOjPSO42DWM57+r85c5/h6ScFAlmGWN4CGwwFCQPC -ZwAACgkQ+r85c5/h6Sfjfwf+O4WEjRdvPJLxNy7mfAGoAqDMHIwyH/tVzYgyVhnG -h/+cfRxJbGc3rpjYdr8dmvghzjEAout8uibPWaIqs63RCAPGPqgWLfxNO5c8+y8V -LZMVOTV26l2olkkdBWAuhLqKTNh6TiQva03yhOgHWj4XDvFfxICWPFXVd6t5ELpD -iApGu1OAj8JfhmzbG03Yzx+Ku7bWDxMonx3V/IDEu5LS5zrboHYDKCA53bXXghoi -Aceqql+PKrDwEjoY4bptwMHLmcjGjdCQ//Qx1neho7nZcS7xjTucY8gQuulwCyXF -y6wM+wMz8dunIG9gw4+Re6c4Rz9tX1kzxLrU7Pl21tMqfg== -=0N/9 +mQGNBGC2K2cBDAC1+Xgk+8UfhASVgRngQi4rnQ8k0t+bWsBz4Czd26+cxVDRwlTT +8PALdrbrY/e9iXjcVcZ8Npo4UYe7/LfnL57dc7tgbenRGYYrWyVoNNv58BVw4xCY +RmgvdHWIIPGuz3aME0smHxbJ2KewYTqjTPuVKF/wrHTwCpVWdjYKC5KDo3yx0mro +xf9vOJOnkWNMiEw7TiZfkrbUqxyA53BVsSNKRX5C3b4FJcVT7eiAq7sDAaFxjEHy +ahZslmvg7XZxWzSVzxDNesR7f4xuop8HBjzaluJoVuwiyWculTvz1b6hyHVQr+ad +h8JGjj1tySI65OTFsTuptsfHXjtjl/NR4P6BXkf+FVwweaTQaEzpHkv0m9b9pY43 +CY/8XtS4uNPermiLG/Z0BB1eOCdoOQVHpjOa55IXQWhxXB6NZVyowiUbrR7jLDQy +5JP7D1HmErTR8JRm3VDqGbSaCgugRgFX+lb/fpgFp9k02OeK+JQudolZOt1mVk+T +C4xmEWxfiH15/JMAEQEAAbQbdXNlcjIgPHVzZXIyLTJAZXhhbXBsZS5jb20+iQHU +BBMBCAA+FiEEB/Y4DM3Ba2H9iXmlPO9G70C+/D4FAmC2K2cCGwMFCQPCZwAFCwkI +BwIGFQoJCAsCBBYCAwECHgECF4AACgkQPO9G70C+/D59/Av/XZIhCH4X2FpxCO3d +oCa+sbYkBL5xeUoPfAx5ThXzqL/tllO88TKTMEGZF3k5pocXWH0xmhqlvDTcdb0i +W3O0CN8FLmuotU51c0JC1mt9zwJP9PeJNyqxrMm01Yzj55z/Dz3QHSTlDjrWTWjn +YBqDf2HfdM177oydfSYmevZni1aDmBalWpFPRvqISCO7uFnvg1hJQ5mD/0qie663 +QJ8LAAANg32H9DyPnYi9wU62WX0DMUVTjKctT3cnYCbirjjJ7ZlCCm+cf61CRX1B 
+E1Ng/Ef3ZcUfXWitZSjfET/pKEMSNjsQawFpZ/LPCBl+UPHzaTPAASeGJvcbZ3py +wZQLQc1MCu2hmMBQ8zHQTdS2Pp0RISxCQLYvVQL6DrcJDNiSqn9p9RQt5c5r5Pjx +80BIPcjj3glOVP7PYE2azQAkt6reEjhimwCfjeDpiPnkBTY7Av2jCcUFhhemDY/j +TRXK1paLphhJ36zC22SeHGxNNakjjuUakqB85DEUeoWuVm6ouQGNBGC2K2cBDADx +G2rIAgMjdPtofhkEZXwv6zdNwmYOlIIM+59bam9Ep/vFq8F5f+xldevm5dvM8SeR +pNwDGSOUf5OKBWBdsJFhlYBl7+EcKd/Tent/XS6JoA9ffF33b+r04L543+ykiKON +WYeYi0F4WwYTIQgqZHJze1sPVkYGR5F0bL8PAcLuwd5dzZVi/q2HakrGdg29N8oY +b/XnoR7FflPrNYdzO6hawi5Inx7KS7aWa0ZkARb0F4HSct+/m6nAZVsoJINLudyQ +ut2NWeU8rWIm1hqyIxQFvuQJy46umq++10J/sWA98bkg41Rx+72+eP7DM5v8IgUp +clJsfljRXIBWbmRAVZvtNI7PX9fwMMhf4M7wHO7G2WV39o1exKps5xFFcn8PUQiX +jCSR81M145CgCdmLUR1y0pdkN/WIqjXBhkPIvO2dxEcodMNHb1aUUuUOnww6+xIP +8rGVw+a2DUiALc8Qr5RP21AYKRctfiwhSQh2KODveMtyLI3U9C/eLRPp+QM3XB8A +EQEAAYkBvAQYAQgAJhYhBAf2OAzNwWth/Yl5pTzvRu9Avvw+BQJgtitnAhsMBQkD +wmcAAAoJEDzvRu9Avvw+3FcMAJBwupyJ4zwQFxTJ5BkDlusG3U2FXEf3bDrXhvNd +qi8eS8Vo/vRiH/w/my5JFpz1o2tJToryF71D+uF5DTItalKquhsQ9reAEmXggqOh +9Jd9mWJIEEWcRORiLNDKENKvE8bouw4U4hRaSF0IaGzAe5mO+oOvwal8L97wFxrZ +4leM1GzkopiuNfbkkBBw2KJcMjYBHzzXSCALnVwhjbgkBEWPIg38APT3cr9KfnMM +q8+tvsGLj4piAl3Lww7+GhSsDOUXH8btR41BSAQDrbO5q6oi/h4nuxoNmQIDW/Ug +s+dd5hnY2FtHRjb4FCR9kAjdTE6stc8wzohWfbg1N+12TTA2ylByAumICVXixavH +RJ7l0OiWJk388qw9mqh3k8HcBxL7OfDlFC9oPmCS0iYiIwW/Yc80kBhoxcvl/Xa7 +mIMMn8taHIaQO7v9ln2EVQYTzbNCmwTw9ovTM0j/Pbkg2EftfP1TCoxQHvBnsCED +6qgtsUdi5eviONRkBgeZtN3oxA== +=MgDv -----END PGP PUBLIC KEY BLOCK-----`) } diff --git a/integrations/api_issue_test.go b/integrations/api_issue_test.go index f932f79feea0..74512cafa3f6 100644 --- a/integrations/api_issue_test.go +++ b/integrations/api_issue_test.go @@ -25,9 +25,10 @@ func TestAPIListIssues(t *testing.T) { session := loginUser(t, owner.Name) token := getTokenForLoggedInUser(t, session) - req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/issues?state=all&token=%s", - owner.Name, repo.Name, token) - resp := session.MakeRequest(t, req, http.StatusOK) + link, _ := url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/issues", owner.Name, repo.Name)) + + link.RawQuery = url.Values{"token": {token}, "state": {"all"}}.Encode() + resp := session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) var apiIssues []*api.Issue DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, models.GetCount(t, &models.Issue{RepoID: repo.ID})) @@ -36,15 +37,34 @@ func TestAPIListIssues(t *testing.T) { } // test milestone filter - req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/issues?state=all&type=all&milestones=ignore,milestone1,3,4&token=%s", - owner.Name, repo.Name, token) - resp = session.MakeRequest(t, req, http.StatusOK) + link.RawQuery = url.Values{"token": {token}, "state": {"all"}, "type": {"all"}, "milestones": {"ignore,milestone1,3,4"}}.Encode() + resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) DecodeJSON(t, resp, &apiIssues) if assert.Len(t, apiIssues, 2) { assert.EqualValues(t, 3, apiIssues[0].Milestone.ID) assert.EqualValues(t, 1, apiIssues[1].Milestone.ID) } + link.RawQuery = url.Values{"token": {token}, "state": {"all"}, "created_by": {"user2"}}.Encode() + resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + if assert.Len(t, apiIssues, 1) { + assert.EqualValues(t, 5, apiIssues[0].ID) + } + + link.RawQuery = url.Values{"token": {token}, "state": {"all"}, "assigned_by": {"user1"}}.Encode() + resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + if 
assert.Len(t, apiIssues, 1) { + assert.EqualValues(t, 1, apiIssues[0].ID) + } + + link.RawQuery = url.Values{"token": {token}, "state": {"all"}, "mentioned_by": {"user4"}}.Encode() + resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + if assert.Len(t, apiIssues, 1) { + assert.EqualValues(t, 1, apiIssues[0].ID) + } } func TestAPICreateIssue(t *testing.T) { @@ -65,8 +85,8 @@ func TestAPICreateIssue(t *testing.T) { resp := session.MakeRequest(t, req, http.StatusCreated) var apiIssue api.Issue DecodeJSON(t, resp, &apiIssue) - assert.Equal(t, apiIssue.Body, body) - assert.Equal(t, apiIssue.Title, title) + assert.Equal(t, body, apiIssue.Body) + assert.Equal(t, title, apiIssue.Title) models.AssertExistsAndLoadBean(t, &models.Issue{ RepoID: repoBefore.ID, @@ -202,6 +222,20 @@ func TestAPISearchIssues(t *testing.T) { resp = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 1) + + query = url.Values{"milestones": {"milestone1"}, "state": {"all"}} + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 1) + + query = url.Values{"milestones": {"milestone1,milestone3"}, "state": {"all"}} + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) } func TestAPISearchIssuesWithLabels(t *testing.T) { diff --git a/integrations/api_notification_test.go b/integrations/api_notification_test.go index 42041734200b..96af14fb82a9 100644 --- a/integrations/api_notification_test.go +++ b/integrations/api_notification_test.go @@ -45,14 +45,14 @@ func TestAPINotification(t *testing.T) { assert.Len(t, apiNL, 3) assert.EqualValues(t, 4, apiNL[0].ID) - assert.EqualValues(t, true, apiNL[0].Unread) - assert.EqualValues(t, false, apiNL[0].Pinned) + assert.True(t, apiNL[0].Unread) + assert.False(t, apiNL[0].Pinned) assert.EqualValues(t, 3, apiNL[1].ID) - assert.EqualValues(t, false, apiNL[1].Unread) - assert.EqualValues(t, true, apiNL[1].Pinned) + assert.False(t, apiNL[1].Unread) + assert.True(t, apiNL[1].Pinned) assert.EqualValues(t, 2, apiNL[2].ID) - assert.EqualValues(t, false, apiNL[2].Unread) - assert.EqualValues(t, false, apiNL[2].Pinned) + assert.False(t, apiNL[2].Unread) + assert.False(t, apiNL[2].Pinned) // -- GET /repos/{owner}/{repo}/notifications -- req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/repos/%s/%s/notifications?status-types=unread&token=%s", user2.Name, repo1.Name, token)) @@ -74,8 +74,8 @@ func TestAPINotification(t *testing.T) { DecodeJSON(t, resp, &apiN) assert.EqualValues(t, 5, apiN.ID) - assert.EqualValues(t, false, apiN.Pinned) - assert.EqualValues(t, true, apiN.Unread) + assert.False(t, apiN.Pinned) + assert.True(t, apiN.Unread) assert.EqualValues(t, "issue4", apiN.Subject.Title) assert.EqualValues(t, "Issue", apiN.Subject.Type) assert.EqualValues(t, thread5.Issue.APIURL(), apiN.Subject.URL) diff --git a/integrations/api_oauth2_apps_test.go b/integrations/api_oauth2_apps_test.go index 0ba56b6c9fca..5c90dbb3bca1 100644 --- a/integrations/api_oauth2_apps_test.go +++ b/integrations/api_oauth2_apps_test.go @@ -123,7 +123,7 @@ func testAPIGetOAuth2Application(t *testing.T) { assert.EqualValues(t, existApp.ClientID, expectedApp.ClientID) assert.Len(t, expectedApp.ClientID, 36) assert.Empty(t, expectedApp.ClientSecret) - 
assert.EqualValues(t, len(expectedApp.RedirectURIs), 1) + assert.Len(t, expectedApp.RedirectURIs, 1) assert.EqualValues(t, existApp.RedirectURIs[0], expectedApp.RedirectURIs[0]) models.AssertExistsAndLoadBean(t, &models.OAuth2Application{ID: expectedApp.ID, Name: expectedApp.Name}) } @@ -156,7 +156,7 @@ func testAPIUpdateOAuth2Application(t *testing.T) { DecodeJSON(t, resp, &app) expectedApp := app - assert.EqualValues(t, len(expectedApp.RedirectURIs), 2) + assert.Len(t, expectedApp.RedirectURIs, 2) assert.EqualValues(t, expectedApp.RedirectURIs[0], appBody.RedirectURIs[0]) assert.EqualValues(t, expectedApp.RedirectURIs[1], appBody.RedirectURIs[1]) models.AssertExistsAndLoadBean(t, &models.OAuth2Application{ID: expectedApp.ID, Name: expectedApp.Name}) diff --git a/integrations/api_org_test.go b/integrations/api_org_test.go index 551da3032694..bc4428b99e15 100644 --- a/integrations/api_org_test.go +++ b/integrations/api_org_test.go @@ -69,7 +69,7 @@ func TestAPIOrgCreate(t *testing.T) { // user1 on this org is public var users []*api.User DecodeJSON(t, resp, &users) - assert.EqualValues(t, 1, len(users)) + assert.Len(t, users, 1) assert.EqualValues(t, "user1", users[0].UserName) }) } diff --git a/integrations/api_pull_commits_test.go b/integrations/api_pull_commits_test.go new file mode 100644 index 000000000000..30682d9c147e --- /dev/null +++ b/integrations/api_pull_commits_test.go @@ -0,0 +1,37 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "net/http" + "testing" + + "code.gitea.io/gitea/models" + api "code.gitea.io/gitea/modules/structs" + "github.com/stretchr/testify/assert" +) + +func TestAPIPullCommits(t *testing.T) { + defer prepareTestEnv(t)() + pullIssue := models.AssertExistsAndLoadBean(t, &models.PullRequest{ID: 2}).(*models.PullRequest) + assert.NoError(t, pullIssue.LoadIssue()) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: pullIssue.HeadRepoID}).(*models.Repository) + + session := loginUser(t, "user2") + req := NewRequestf(t, http.MethodGet, "/api/v1/repos/%s/%s/pulls/%d/commits", repo.OwnerName, repo.Name, pullIssue.Index) + resp := session.MakeRequest(t, req, http.StatusOK) + + var commits []*api.Commit + DecodeJSON(t, resp, &commits) + + if !assert.Len(t, commits, 2) { + return + } + + assert.Equal(t, "5f22f7d0d95d614d25a5b68592adb345a4b5c7fd", commits[0].SHA) + assert.Equal(t, "4a357436d925b5c974181ff12a994538ddc5a269", commits[1].SHA) +} + +// TODO add tests for already merged PR and closed PR diff --git a/integrations/api_pull_review_test.go b/integrations/api_pull_review_test.go index 19b05d545b0b..bcc0cbffcb38 100644 --- a/integrations/api_pull_review_test.go +++ b/integrations/api_pull_review_test.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models" api "code.gitea.io/gitea/modules/structs" + jsoniter "github.com/json-iterator/go" "github.com/stretchr/testify/assert" ) @@ -37,15 +38,15 @@ func TestAPIPullReview(t *testing.T) { assert.EqualValues(t, 8, reviews[3].ID) assert.EqualValues(t, "APPROVED", reviews[3].State) assert.EqualValues(t, 0, reviews[3].CodeCommentsCount) - assert.EqualValues(t, true, reviews[3].Stale) - assert.EqualValues(t, false, reviews[3].Official) + assert.True(t, reviews[3].Stale) + assert.False(t, reviews[3].Official) assert.EqualValues(t, 10, reviews[5].ID) assert.EqualValues(t, "REQUEST_CHANGES", reviews[5].State) assert.EqualValues(t, 1, 
reviews[5].CodeCommentsCount) assert.EqualValues(t, -1, reviews[5].Reviewer.ID) // ghost user - assert.EqualValues(t, false, reviews[5].Stale) - assert.EqualValues(t, true, reviews[5].Official) + assert.False(t, reviews[5].Stale) + assert.True(t, reviews[5].Official) // test GetPullReview req = NewRequestf(t, http.MethodGet, "/api/v1/repos/%s/%s/pulls/%d/reviews/%d?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, reviews[3].ID, token) @@ -118,14 +119,14 @@ func TestAPIPullReview(t *testing.T) { resp = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &review) assert.EqualValues(t, 6, review.ID) - assert.EqualValues(t, true, review.Dismissed) + assert.True(t, review.Dismissed) // test dismiss review req = NewRequest(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews/%d/undismissals?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, review.ID, token)) resp = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &review) assert.EqualValues(t, 6, review.ID) - assert.EqualValues(t, false, review.Dismissed) + assert.False(t, review.Dismissed) // test DeletePullReview req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{ @@ -139,6 +140,59 @@ func TestAPIPullReview(t *testing.T) { req = NewRequestf(t, http.MethodDelete, "/api/v1/repos/%s/%s/pulls/%d/reviews/%d?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, review.ID, token) resp = session.MakeRequest(t, req, http.StatusNoContent) + // test CreatePullReview Comment without body but with comments + req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{ + // Body: "", + Event: "COMMENT", + Comments: []api.CreatePullReviewComment{{ + Path: "README.md", + Body: "first new line", + OldLineNum: 0, + NewLineNum: 1, + }, { + Path: "README.md", + Body: "first old line", + OldLineNum: 1, + NewLineNum: 0, + }, + }, + }) + var commentReview api.PullReview + + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &commentReview) + assert.EqualValues(t, "COMMENT", commentReview.State) + assert.EqualValues(t, 2, commentReview.CodeCommentsCount) + assert.EqualValues(t, "", commentReview.Body) + assert.EqualValues(t, false, commentReview.Dismissed) + + // test CreatePullReview Comment with body but without comments + commentBody := "This is a body of the comment." 
+ req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{ + Body: commentBody, + Event: "COMMENT", + Comments: []api.CreatePullReviewComment{}, + }) + + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &commentReview) + assert.EqualValues(t, "COMMENT", commentReview.State) + assert.EqualValues(t, 0, commentReview.CodeCommentsCount) + assert.EqualValues(t, commentBody, commentReview.Body) + assert.EqualValues(t, false, commentReview.Dismissed) + + // test CreatePullReview Comment without body and no comments + req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{ + Body: "", + Event: "COMMENT", + Comments: []api.CreatePullReviewComment{}, + }) + resp = session.MakeRequest(t, req, http.StatusUnprocessableEntity) + errMap := make(map[string]interface{}) + json := jsoniter.ConfigCompatibleWithStandardLibrary + json.Unmarshal(resp.Body.Bytes(), &errMap) + assert.EqualValues(t, "review event COMMENT requires a body or a comment", errMap["message"].(string)) + // test get review requests // to make it simple, use same api with get review pullIssue12 := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 12}).(*models.Issue) @@ -151,15 +205,15 @@ func TestAPIPullReview(t *testing.T) { assert.EqualValues(t, 11, reviews[0].ID) assert.EqualValues(t, "REQUEST_REVIEW", reviews[0].State) assert.EqualValues(t, 0, reviews[0].CodeCommentsCount) - assert.EqualValues(t, false, reviews[0].Stale) - assert.EqualValues(t, true, reviews[0].Official) + assert.False(t, reviews[0].Stale) + assert.True(t, reviews[0].Official) assert.EqualValues(t, "test_team", reviews[0].ReviewerTeam.Name) assert.EqualValues(t, 12, reviews[1].ID) assert.EqualValues(t, "REQUEST_REVIEW", reviews[1].State) assert.EqualValues(t, 0, reviews[0].CodeCommentsCount) - assert.EqualValues(t, false, reviews[1].Stale) - assert.EqualValues(t, true, reviews[1].Official) + assert.False(t, reviews[1].Stale) + assert.True(t, reviews[1].Official) assert.EqualValues(t, 1, reviews[1].Reviewer.ID) } diff --git a/integrations/api_releases_test.go b/integrations/api_releases_test.go index 26bf752ccae9..027b282036f6 100644 --- a/integrations/api_releases_test.go +++ b/integrations/api_releases_test.go @@ -7,6 +7,7 @@ package integrations import ( "fmt" "net/http" + "net/url" "testing" "code.gitea.io/gitea/models" @@ -16,6 +17,58 @@ import ( "github.com/stretchr/testify/assert" ) +func TestAPIListReleases(t *testing.T) { + defer prepareTestEnv(t)() + + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + user2 := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + session := loginUser(t, user2.LowerName) + token := getTokenForLoggedInUser(t, session) + + link, _ := url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/releases", user2.Name, repo.Name)) + link.RawQuery = url.Values{"token": {token}}.Encode() + resp := session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + var apiReleases []*api.Release + DecodeJSON(t, resp, &apiReleases) + if assert.Len(t, apiReleases, 3) { + for _, release := range apiReleases { + switch release.ID { + case 1: + assert.False(t, release.IsDraft) + assert.False(t, release.IsPrerelease) + case 4: + assert.True(t, release.IsDraft) + assert.False(t, release.IsPrerelease) + case 
5: + assert.False(t, release.IsDraft) + assert.True(t, release.IsPrerelease) + default: + assert.NoError(t, fmt.Errorf("unexpected release: %v", release)) + } + } + } + + // test filter + testFilterByLen := func(auth bool, query url.Values, expectedLength int, msgAndArgs ...string) { + link.RawQuery = query.Encode() + if auth { + query.Set("token", token) + resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + } else { + resp = MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + } + DecodeJSON(t, resp, &apiReleases) + assert.Len(t, apiReleases, expectedLength, msgAndArgs) + } + + testFilterByLen(false, url.Values{"draft": {"true"}}, 0, "anon should not see drafts") + testFilterByLen(true, url.Values{"draft": {"true"}}, 1, "repo owner should see drafts") + testFilterByLen(true, url.Values{"draft": {"false"}}, 2, "exclude drafts") + testFilterByLen(true, url.Values{"draft": {"false"}, "pre-release": {"false"}}, 1, "exclude drafts and pre-releases") + testFilterByLen(true, url.Values{"pre-release": {"true"}}, 1, "only get pre-release") + testFilterByLen(true, url.Values{"draft": {"true"}, "pre-release": {"true"}}, 0, "there is no pre-release draft") +} + func createNewReleaseUsingAPI(t *testing.T, session *TestSession, token string, owner *models.User, repo *models.Repository, name, target, title, desc string) *api.Release { urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases?token=%s", owner.Name, repo.Name, token) diff --git a/integrations/api_repo_edit_test.go b/integrations/api_repo_edit_test.go index c1b513d0753c..618c1f0ad09b 100644 --- a/integrations/api_repo_edit_test.go +++ b/integrations/api_repo_edit_test.go @@ -130,11 +130,14 @@ func getNewRepoEditOption(opts *api.EditRepoOption) *api.EditRepoOption { func TestAPIRepoEdit(t *testing.T) { onGiteaRun(t, func(t *testing.T, u *url.URL) { + bFalse, bTrue := false, true + user2 := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) // owner of the repo1 & repo16 user3 := models.AssertExistsAndLoadBean(t, &models.User{ID: 3}).(*models.User) // owner of the repo3, is an org user4 := models.AssertExistsAndLoadBean(t, &models.User{ID: 4}).(*models.User) // owner of neither repos repo1 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) // public repo repo3 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 3}).(*models.Repository) // public repo + repo15 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 15}).(*models.Repository) // empty repo repo16 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository) // private repo // Get user2's token @@ -286,9 +289,8 @@ func TestAPIRepoEdit(t *testing.T) { // Test making a repo public that is private repo16 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository) assert.True(t, repo16.IsPrivate) - private := false repoEditOption = &api.EditRepoOption{ - Private: &private, + Private: &bFalse, } url = fmt.Sprintf("/api/v1/repos/%s/%s?token=%s", user2.Name, repo16.Name, token2) req = NewRequestWithJSON(t, "PATCH", url, &repoEditOption) @@ -296,11 +298,24 @@ func TestAPIRepoEdit(t *testing.T) { repo16 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository) assert.False(t, repo16.IsPrivate) // Make it private again - private = true - repoEditOption.Private = &private + repoEditOption.Private = &bTrue req = NewRequestWithJSON(t, "PATCH", url, &repoEditOption) _ = session.MakeRequest(t, req, http.StatusOK) 
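For context, the hunk that follows toggles the new Archived flag on the empty repo15 through PATCH /api/v1/repos/{owner}/{repo}. A minimal client-side sketch of that call, assuming the "archived" JSON field of api.EditRepoOption and the token query parameter used throughout these tests; the base URL, repository and token are placeholders, not values from this patch:

	// Illustrative only: toggle the "archived" flag the test below exercises.
	// Endpoint shape and token query parameter follow the surrounding tests.
	package main

	import (
		"bytes"
		"encoding/json"
		"fmt"
		"net/http"
	)

	func setArchived(baseURL, owner, repo, token string, archived bool) error {
		body, err := json.Marshal(map[string]bool{"archived": archived}) // assumed JSON field name
		if err != nil {
			return err
		}
		url := fmt.Sprintf("%s/api/v1/repos/%s/%s?token=%s", baseURL, owner, repo, token)
		req, err := http.NewRequest(http.MethodPatch, url, bytes.NewReader(body))
		if err != nil {
			return err
		}
		req.Header.Set("Content-Type", "application/json")
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return err
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			return fmt.Errorf("unexpected status %d", resp.StatusCode)
		}
		return nil
	}

	func main() {
		// Hypothetical values; substitute a real instance, repository and token.
		if err := setArchived("http://localhost:3000", "user2", "repo15", "TOKEN", true); err != nil {
			fmt.Println(err)
		}
	}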
+ // Test to change empty repo + assert.False(t, repo15.IsArchived) + url = fmt.Sprintf("/api/v1/repos/%s/%s?token=%s", user2.Name, repo15.Name, token2) + req = NewRequestWithJSON(t, "PATCH", url, &api.EditRepoOption{ + Archived: &bTrue, + }) + _ = session.MakeRequest(t, req, http.StatusOK) + repo15 = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 15}).(*models.Repository) + assert.True(t, repo15.IsArchived) + req = NewRequestWithJSON(t, "PATCH", url, &api.EditRepoOption{ + Archived: &bFalse, + }) + _ = session.MakeRequest(t, req, http.StatusOK) + // Test using org repo "user3/repo3" where user2 is a collaborator origRepoEditOption = getRepoEditOptionFromRepo(repo3) repoEditOption = getNewRepoEditOption(origRepoEditOption) diff --git a/integrations/api_repo_lfs_locks_test.go b/integrations/api_repo_lfs_locks_test.go index 69981d1c4200..03549c11f4c2 100644 --- a/integrations/api_repo_lfs_locks_test.go +++ b/integrations/api_repo_lfs_locks_test.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" @@ -40,11 +41,11 @@ func TestAPILFSLocksNotLogin(t *testing.T) { repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) req := NewRequestf(t, "GET", "/%s/%s.git/info/lfs/locks", user.Name, repo.Name) - req.Header.Set("Accept", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) resp := MakeRequest(t, req, http.StatusUnauthorized) var lfsLockError api.LFSLockError DecodeJSON(t, resp, &lfsLockError) - assert.Equal(t, "Unauthorized", lfsLockError.Message) + assert.Equal(t, "You must have pull access to list locks", lfsLockError.Message) } func TestAPILFSLocksLogged(t *testing.T) { @@ -102,8 +103,8 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range tests { session := loginUser(t, test.user.Name) req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/%s.git/info/lfs/locks", test.repo.FullName()), map[string]string{"path": test.path}) - req.Header.Set("Accept", "application/vnd.git-lfs+json") - req.Header.Set("Content-Type", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Content-Type", lfs.MediaType) resp := session.MakeRequest(t, req, test.httpResult) if len(test.addTime) > 0 { var lfsLock api.LFSLockResponse @@ -119,7 +120,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range resultsTests { session := loginUser(t, test.user.Name) req := NewRequestf(t, "GET", "/%s.git/info/lfs/locks", test.repo.FullName()) - req.Header.Set("Accept", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLocks api.LFSLockList DecodeJSON(t, resp, &lfsLocks) @@ -131,8 +132,8 @@ func TestAPILFSLocksLogged(t *testing.T) { } req = NewRequestWithJSON(t, "POST", fmt.Sprintf("/%s.git/info/lfs/locks/verify", test.repo.FullName()), map[string]string{}) - req.Header.Set("Accept", "application/vnd.git-lfs+json") - req.Header.Set("Content-Type", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Content-Type", lfs.MediaType) resp = session.MakeRequest(t, req, http.StatusOK) var lfsLocksVerify api.LFSLockListVerify DecodeJSON(t, resp, &lfsLocksVerify) @@ -155,8 +156,8 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range deleteTests { session := loginUser(t, test.user.Name) req := NewRequestWithJSON(t, "POST", 
fmt.Sprintf("/%s.git/info/lfs/locks/%s/unlock", test.repo.FullName(), test.lockID), map[string]string{}) - req.Header.Set("Accept", "application/vnd.git-lfs+json") - req.Header.Set("Content-Type", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Content-Type", lfs.MediaType) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLockRep api.LFSLockResponse DecodeJSON(t, resp, &lfsLockRep) @@ -168,7 +169,7 @@ func TestAPILFSLocksLogged(t *testing.T) { for _, test := range resultsTests { session := loginUser(t, test.user.Name) req := NewRequestf(t, "GET", "/%s.git/info/lfs/locks", test.repo.FullName()) - req.Header.Set("Accept", "application/vnd.git-lfs+json") + req.Header.Set("Accept", lfs.MediaType) resp := session.MakeRequest(t, req, http.StatusOK) var lfsLocks api.LFSLockList DecodeJSON(t, resp, &lfsLocks) diff --git a/integrations/api_repo_lfs_test.go b/integrations/api_repo_lfs_test.go new file mode 100644 index 000000000000..9e1e2b041894 --- /dev/null +++ b/integrations/api_repo_lfs_test.go @@ -0,0 +1,466 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "bytes" + "net/http" + "path" + "strconv" + "strings" + "testing" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/lfs" + "code.gitea.io/gitea/modules/setting" + + jsoniter "github.com/json-iterator/go" + "github.com/stretchr/testify/assert" +) + +func TestAPILFSNotStarted(t *testing.T) { + defer prepareTestEnv(t)() + + setting.LFS.StartServer = false + + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + req := NewRequestf(t, "POST", "/%s/%s.git/info/lfs/objects/batch", user.Name, repo.Name) + MakeRequest(t, req, http.StatusNotFound) + req = NewRequestf(t, "PUT", "/%s/%s.git/info/lfs/objects/oid/10", user.Name, repo.Name) + MakeRequest(t, req, http.StatusNotFound) + req = NewRequestf(t, "GET", "/%s/%s.git/info/lfs/objects/oid/name", user.Name, repo.Name) + MakeRequest(t, req, http.StatusNotFound) + req = NewRequestf(t, "GET", "/%s/%s.git/info/lfs/objects/oid", user.Name, repo.Name) + MakeRequest(t, req, http.StatusNotFound) + req = NewRequestf(t, "POST", "/%s/%s.git/info/lfs/verify", user.Name, repo.Name) + MakeRequest(t, req, http.StatusNotFound) +} + +func TestAPILFSMediaType(t *testing.T) { + defer prepareTestEnv(t)() + + setting.LFS.StartServer = true + + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + req := NewRequestf(t, "POST", "/%s/%s.git/info/lfs/objects/batch", user.Name, repo.Name) + MakeRequest(t, req, http.StatusUnsupportedMediaType) + req = NewRequestf(t, "POST", "/%s/%s.git/info/lfs/verify", user.Name, repo.Name) + MakeRequest(t, req, http.StatusUnsupportedMediaType) +} + +func createLFSTestRepository(t *testing.T, name string) *models.Repository { + ctx := NewAPITestContext(t, "user2", "lfs-"+name+"-repo") + t.Run("CreateRepo", doAPICreateRepository(ctx, false)) + + repo, err := models.GetRepositoryByOwnerAndName("user2", "lfs-"+name+"-repo") + assert.NoError(t, err) + + return repo +} + +func TestAPILFSBatch(t *testing.T) { + defer prepareTestEnv(t)() + + setting.LFS.StartServer = true + + repo := createLFSTestRepository(t, "batch") + + content := 
[]byte("dummy1") + oid := storeObjectInRepo(t, repo.ID, &content) + defer repo.RemoveLFSMetaObjectByOid(oid) + + session := loginUser(t, "user2") + + newRequest := func(t testing.TB, br *lfs.BatchRequest) *http.Request { + req := NewRequestWithJSON(t, "POST", "/user2/lfs-batch-repo.git/info/lfs/objects/batch", br) + req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Content-Type", lfs.MediaType) + return req + } + decodeResponse := func(t *testing.T, b *bytes.Buffer) *lfs.BatchResponse { + var br lfs.BatchResponse + + json := jsoniter.ConfigCompatibleWithStandardLibrary + assert.NoError(t, json.Unmarshal(b.Bytes(), &br)) + return &br + } + + t.Run("InvalidJsonRequest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, nil) + + session.MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("InvalidOperation", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "dummy", + }) + + session.MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("InvalidPointer", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "download", + Objects: []lfs.Pointer{ + {Oid: "dummy"}, + {Oid: oid, Size: -1}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 2) + assert.Equal(t, "dummy", br.Objects[0].Oid) + assert.Equal(t, oid, br.Objects[1].Oid) + assert.Equal(t, int64(0), br.Objects[0].Size) + assert.Equal(t, int64(-1), br.Objects[1].Size) + assert.NotNil(t, br.Objects[0].Error) + assert.NotNil(t, br.Objects[1].Error) + assert.Equal(t, http.StatusUnprocessableEntity, br.Objects[0].Error.Code) + assert.Equal(t, http.StatusUnprocessableEntity, br.Objects[1].Error.Code) + assert.Equal(t, "Oid or size are invalid", br.Objects[0].Error.Message) + assert.Equal(t, "Oid or size are invalid", br.Objects[1].Error.Message) + }) + + t.Run("PointerSizeMismatch", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "download", + Objects: []lfs.Pointer{ + {Oid: oid, Size: 1}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.NotNil(t, br.Objects[0].Error) + assert.Equal(t, http.StatusUnprocessableEntity, br.Objects[0].Error.Code) + assert.Equal(t, "Object "+oid+" is not 1 bytes", br.Objects[0].Error.Message) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + t.Run("PointerNotInStore", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "download", + Objects: []lfs.Pointer{ + {Oid: "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab042", Size: 6}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.NotNil(t, br.Objects[0].Error) + assert.Equal(t, http.StatusNotFound, br.Objects[0].Error.Code) + }) + + t.Run("MetaNotFound", func(t *testing.T) { + defer PrintCurrentTest(t)() + + p := lfs.Pointer{Oid: "05eeb4eb5be71f2dd291ca39157d6d9effd7d1ea19cbdc8a99411fe2a8f26a00", Size: 6} + + contentStore := lfs.NewContentStore() + exist, err := contentStore.Exists(p) + assert.NoError(t, err) + assert.False(t, exist) + err = contentStore.Put(p, bytes.NewReader([]byte("dummy0"))) + assert.NoError(t, err) + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "download", + Objects: 
[]lfs.Pointer{p}, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.NotNil(t, br.Objects[0].Error) + assert.Equal(t, http.StatusNotFound, br.Objects[0].Error.Code) + }) + + t.Run("Success", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "download", + Objects: []lfs.Pointer{ + {Oid: oid, Size: 6}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.Nil(t, br.Objects[0].Error) + assert.Contains(t, br.Objects[0].Actions, "download") + l := br.Objects[0].Actions["download"] + assert.NotNil(t, l) + assert.NotEmpty(t, l.Href) + }) + }) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + t.Run("FileTooBig", func(t *testing.T) { + defer PrintCurrentTest(t)() + + oldMaxFileSize := setting.LFS.MaxFileSize + setting.LFS.MaxFileSize = 2 + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "upload", + Objects: []lfs.Pointer{ + {Oid: "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab042", Size: 6}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.NotNil(t, br.Objects[0].Error) + assert.Equal(t, http.StatusUnprocessableEntity, br.Objects[0].Error.Code) + assert.Equal(t, "Size must be less than or equal to 2", br.Objects[0].Error.Message) + + setting.LFS.MaxFileSize = oldMaxFileSize + }) + + t.Run("AddMeta", func(t *testing.T) { + defer PrintCurrentTest(t)() + + p := lfs.Pointer{Oid: "05eeb4eb5be71f2dd291ca39157d6d9effd7d1ea19cbdc8a99411fe2a8f26a00", Size: 6} + + contentStore := lfs.NewContentStore() + exist, err := contentStore.Exists(p) + assert.NoError(t, err) + assert.True(t, exist) + + meta, err := repo.GetLFSMetaObjectByOid(p.Oid) + assert.Nil(t, meta) + assert.Equal(t, models.ErrLFSObjectNotExist, err) + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "upload", + Objects: []lfs.Pointer{p}, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.Nil(t, br.Objects[0].Error) + assert.Empty(t, br.Objects[0].Actions) + + meta, err = repo.GetLFSMetaObjectByOid(p.Oid) + assert.NoError(t, err) + assert.NotNil(t, meta) + }) + + t.Run("AlreadyExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "upload", + Objects: []lfs.Pointer{ + {Oid: oid, Size: 6}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.Nil(t, br.Objects[0].Error) + assert.Empty(t, br.Objects[0].Actions) + }) + + t.Run("NewFile", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.BatchRequest{ + Operation: "upload", + Objects: []lfs.Pointer{ + {Oid: "d6f175817f886ec6fbbc1515326465fa96c3bfd54a4ea06cfd6dbbd8340e0153", Size: 1}, + }, + }) + + resp := session.MakeRequest(t, req, http.StatusOK) + br := decodeResponse(t, resp.Body) + assert.Len(t, br.Objects, 1) + assert.Nil(t, br.Objects[0].Error) + assert.Contains(t, br.Objects[0].Actions, "upload") + ul := br.Objects[0].Actions["upload"] + assert.NotNil(t, ul) + assert.NotEmpty(t, ul.Href) + assert.Contains(t, br.Objects[0].Actions, "verify") + vl := br.Objects[0].Actions["verify"] + assert.NotNil(t, vl) + assert.NotEmpty(t, vl.Href) + }) + }) +} + +func 
TestAPILFSUpload(t *testing.T) { + defer prepareTestEnv(t)() + + setting.LFS.StartServer = true + + repo := createLFSTestRepository(t, "upload") + + content := []byte("dummy3") + oid := storeObjectInRepo(t, repo.ID, &content) + defer repo.RemoveLFSMetaObjectByOid(oid) + + session := loginUser(t, "user2") + + newRequest := func(t testing.TB, p lfs.Pointer, content string) *http.Request { + req := NewRequestWithBody(t, "PUT", path.Join("/user2/lfs-upload-repo.git/info/lfs/objects/", p.Oid, strconv.FormatInt(p.Size, 10)), strings.NewReader(content)) + return req + } + + t.Run("InvalidPointer", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, lfs.Pointer{Oid: "dummy"}, "") + + session.MakeRequest(t, req, http.StatusUnprocessableEntity) + }) + + t.Run("AlreadyExistsInStore", func(t *testing.T) { + defer PrintCurrentTest(t)() + + p := lfs.Pointer{Oid: "83de2e488b89a0aa1c97496b888120a28b0c1e15463a4adb8405578c540f36d4", Size: 6} + + contentStore := lfs.NewContentStore() + exist, err := contentStore.Exists(p) + assert.NoError(t, err) + assert.False(t, exist) + err = contentStore.Put(p, bytes.NewReader([]byte("dummy5"))) + assert.NoError(t, err) + + meta, err := repo.GetLFSMetaObjectByOid(p.Oid) + assert.Nil(t, meta) + assert.Equal(t, models.ErrLFSObjectNotExist, err) + + req := newRequest(t, p, "") + + session.MakeRequest(t, req, http.StatusOK) + + meta, err = repo.GetLFSMetaObjectByOid(p.Oid) + assert.NoError(t, err) + assert.NotNil(t, meta) + }) + + t.Run("MetaAlreadyExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, lfs.Pointer{Oid: oid, Size: 6}, "") + + session.MakeRequest(t, req, http.StatusOK) + }) + + t.Run("HashMismatch", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, lfs.Pointer{Oid: "2581dd7bbc1fe44726de4b7dd806a087a978b9c5aec0a60481259e34be09b06a", Size: 1}, "a") + + session.MakeRequest(t, req, http.StatusUnprocessableEntity) + }) + + t.Run("SizeMismatch", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, lfs.Pointer{Oid: "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb", Size: 2}, "a") + + session.MakeRequest(t, req, http.StatusUnprocessableEntity) + }) + + t.Run("Success", func(t *testing.T) { + defer PrintCurrentTest(t)() + + p := lfs.Pointer{Oid: "6ccce4863b70f258d691f59609d31b4502e1ba5199942d3bc5d35d17a4ce771d", Size: 5} + + req := newRequest(t, p, "gitea") + + session.MakeRequest(t, req, http.StatusOK) + + contentStore := lfs.NewContentStore() + exist, err := contentStore.Exists(p) + assert.NoError(t, err) + assert.True(t, exist) + + meta, err := repo.GetLFSMetaObjectByOid(p.Oid) + assert.NoError(t, err) + assert.NotNil(t, meta) + }) +} + +func TestAPILFSVerify(t *testing.T) { + defer prepareTestEnv(t)() + + setting.LFS.StartServer = true + + repo := createLFSTestRepository(t, "verify") + + content := []byte("dummy3") + oid := storeObjectInRepo(t, repo.ID, &content) + defer repo.RemoveLFSMetaObjectByOid(oid) + + session := loginUser(t, "user2") + + newRequest := func(t testing.TB, p *lfs.Pointer) *http.Request { + req := NewRequestWithJSON(t, "POST", "/user2/lfs-verify-repo.git/info/lfs/verify", p) + req.Header.Set("Accept", lfs.MediaType) + req.Header.Set("Content-Type", lfs.MediaType) + return req + } + + t.Run("InvalidJsonRequest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, nil) + + session.MakeRequest(t, req, http.StatusUnprocessableEntity) + }) + + t.Run("InvalidPointer", func(t *testing.T) { + defer 
PrintCurrentTest(t)() + + req := newRequest(t, &lfs.Pointer{}) + + session.MakeRequest(t, req, http.StatusUnprocessableEntity) + }) + + t.Run("PointerNotExisting", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.Pointer{Oid: "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab042", Size: 6}) + + session.MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("Success", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := newRequest(t, &lfs.Pointer{Oid: oid, Size: 6}) + + session.MakeRequest(t, req, http.StatusOK) + }) +} diff --git a/integrations/api_repo_tags_test.go b/integrations/api_repo_tags_test.go index 9988a4830017..0bf54d3a9598 100644 --- a/integrations/api_repo_tags_test.go +++ b/integrations/api_repo_tags_test.go @@ -5,6 +5,7 @@ package integrations import ( + "fmt" "net/http" "testing" @@ -15,23 +16,67 @@ import ( "github.com/stretchr/testify/assert" ) -func TestAPIReposGetTags(t *testing.T) { +func TestAPIRepoTags(t *testing.T) { defer prepareTestEnv(t)() user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) // Login as User2. session := loginUser(t, user.Name) token := getTokenForLoggedInUser(t, session) - req := NewRequestf(t, "GET", "/api/v1/repos/%s/repo1/tags?token="+token, user.Name) + repoName := "repo1" + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/tags?token=%s", user.Name, repoName, token) resp := session.MakeRequest(t, req, http.StatusOK) var tags []*api.Tag DecodeJSON(t, resp, &tags) - assert.EqualValues(t, 1, len(tags)) + assert.Len(t, tags, 1) assert.Equal(t, "v1.1", tags[0].Name) + assert.Equal(t, "Initial commit", tags[0].Message) assert.Equal(t, "65f1bf27bc3bf70f64657658635e66094edbcb4d", tags[0].Commit.SHA) assert.Equal(t, setting.AppURL+"api/v1/repos/user2/repo1/git/commits/65f1bf27bc3bf70f64657658635e66094edbcb4d", tags[0].Commit.URL) assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.zip", tags[0].ZipballURL) assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.tar.gz", tags[0].TarballURL) + + newTag := createNewTagUsingAPI(t, session, token, user.Name, repoName, "gitea/22", "", "nice!\nand some text") + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &tags) + assert.Len(t, tags, 2) + for _, tag := range tags { + if tag.Name != "v1.1" { + assert.EqualValues(t, newTag.Name, tag.Name) + assert.EqualValues(t, newTag.Message, tag.Message) + assert.EqualValues(t, "nice!\nand some text", tag.Message) + assert.EqualValues(t, newTag.Commit.SHA, tag.Commit.SHA) + } + } + + // get created tag + req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token) + resp = session.MakeRequest(t, req, http.StatusOK) + var tag *api.Tag + DecodeJSON(t, resp, &tag) + assert.EqualValues(t, newTag, tag) + + // delete tag + delReq := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token) + resp = session.MakeRequest(t, delReq, http.StatusNoContent) + + // check if it's gone + resp = session.MakeRequest(t, req, http.StatusNotFound) +} + +func createNewTagUsingAPI(t *testing.T, session *TestSession, token string, ownerName, repoName, name, target, msg string) *api.Tag { + urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/tags?token=%s", ownerName, repoName, token) + req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateTagOption{ + TagName: name, + Message: msg, + Target: target, + }) + resp := session.MakeRequest(t, req, http.StatusCreated) + + var respObj api.Tag + 
DecodeJSON(t, resp, &respObj) + return &respObj } diff --git a/integrations/api_repo_teams_test.go b/integrations/api_repo_teams_test.go index a07b58034685..4a155130b935 100644 --- a/integrations/api_repo_teams_test.go +++ b/integrations/api_repo_teams_test.go @@ -33,12 +33,12 @@ func TestAPIRepoTeams(t *testing.T) { DecodeJSON(t, res, &teams) if assert.Len(t, teams, 2) { assert.EqualValues(t, "Owners", teams[0].Name) - assert.EqualValues(t, false, teams[0].CanCreateOrgRepo) + assert.False(t, teams[0].CanCreateOrgRepo) assert.EqualValues(t, []string{"repo.code", "repo.issues", "repo.pulls", "repo.releases", "repo.wiki", "repo.ext_wiki", "repo.ext_issues"}, teams[0].Units) assert.EqualValues(t, "owner", teams[0].Permission) assert.EqualValues(t, "test_team", teams[1].Name) - assert.EqualValues(t, false, teams[1].CanCreateOrgRepo) + assert.False(t, teams[1].CanCreateOrgRepo) assert.EqualValues(t, []string{"repo.issues"}, teams[1].Units) assert.EqualValues(t, "write", teams[1].Permission) } diff --git a/integrations/api_repo_test.go b/integrations/api_repo_test.go index cfd3b58d649d..3948489f56fc 100644 --- a/integrations/api_repo_test.go +++ b/integrations/api_repo_test.go @@ -466,7 +466,7 @@ func TestAPIRepoTransfer(t *testing.T) { session := loginUser(t, user.Name) token := getTokenForLoggedInUser(t, session) repoName := "moveME" - repo := new(models.Repository) + apiRepo := new(api.Repository) req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/user/repos?token=%s", token), &api.CreateRepoOption{ Name: repoName, Description: "repo move around", @@ -475,12 +475,12 @@ func TestAPIRepoTransfer(t *testing.T) { AutoInit: true, }) resp := session.MakeRequest(t, req, http.StatusCreated) - DecodeJSON(t, resp, repo) + DecodeJSON(t, resp, apiRepo) //start testing for _, testCase := range testCases { user = models.AssertExistsAndLoadBean(t, &models.User{ID: testCase.ctxUserID}).(*models.User) - repo = models.AssertExistsAndLoadBean(t, &models.Repository{ID: repo.ID}).(*models.Repository) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: apiRepo.ID}).(*models.Repository) session = loginUser(t, user.Name) token = getTokenForLoggedInUser(t, session) req = NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/%s/%s/transfer?token=%s", repo.OwnerName, repo.Name, token), &api.TransferRepoOption{ @@ -491,6 +491,71 @@ func TestAPIRepoTransfer(t *testing.T) { } //cleanup - repo = models.AssertExistsAndLoadBean(t, &models.Repository{ID: repo.ID}).(*models.Repository) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: apiRepo.ID}).(*models.Repository) _ = models.DeleteRepository(user, repo.OwnerID, repo.ID) } + +func TestAPIGenerateRepo(t *testing.T) { + defer prepareTestEnv(t)() + + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 1}).(*models.User) + session := loginUser(t, user.Name) + token := getTokenForLoggedInUser(t, session) + + templateRepo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 44}).(*models.Repository) + + // user + repo := new(api.Repository) + req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/%s/%s/generate?token=%s", templateRepo.OwnerName, templateRepo.Name, token), &api.GenerateRepoOption{ + Owner: user.Name, + Name: "new-repo", + Description: "test generate repo", + Private: false, + GitContent: true, + }) + resp := session.MakeRequest(t, req, http.StatusCreated) + DecodeJSON(t, resp, repo) + + assert.Equal(t, "new-repo", repo.Name) + + // org + req = NewRequestWithJSON(t, "POST", 
fmt.Sprintf("/api/v1/repos/%s/%s/generate?token=%s", templateRepo.OwnerName, templateRepo.Name, token), &api.GenerateRepoOption{ + Owner: "user3", + Name: "new-repo", + Description: "test generate repo", + Private: false, + GitContent: true, + }) + resp = session.MakeRequest(t, req, http.StatusCreated) + DecodeJSON(t, resp, repo) + + assert.Equal(t, "new-repo", repo.Name) +} + +func TestAPIRepoGetReviewers(t *testing.T) { + defer prepareTestEnv(t)() + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + session := loginUser(t, user.Name) + token := getTokenForLoggedInUser(t, session) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/reviewers?token=%s", user.Name, repo.Name, token) + resp := session.MakeRequest(t, req, http.StatusOK) + var reviewers []*api.User + DecodeJSON(t, resp, &reviewers) + assert.Len(t, reviewers, 4) +} + +func TestAPIRepoGetAssignees(t *testing.T) { + defer prepareTestEnv(t)() + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + session := loginUser(t, user.Name) + token := getTokenForLoggedInUser(t, session) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/assignees?token=%s", user.Name, repo.Name, token) + resp := session.MakeRequest(t, req, http.StatusOK) + var assignees []*api.User + DecodeJSON(t, resp, &assignees) + assert.Len(t, assignees, 1) +} diff --git a/integrations/api_repo_topic_test.go b/integrations/api_repo_topic_test.go index b96489ae2211..5e42bc64bf92 100644 --- a/integrations/api_repo_topic_test.go +++ b/integrations/api_repo_topic_test.go @@ -88,7 +88,7 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequest(t, "GET", url) res = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) - assert.Equal(t, 25, len(topics.TopicNames)) + assert.Len(t, topics.TopicNames, 25) // Test writing more topics than allowed newTopics = append(newTopics, "t26") @@ -115,7 +115,7 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequest(t, "GET", url) res = session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) - assert.Equal(t, 0, len(topics.TopicNames)) + assert.Empty(t, topics.TopicNames) // Test add a topic to repo with write access (requires repo admin access) req = NewRequestf(t, "PUT", "/api/v1/repos/%s/%s/topics/%s?token=%s", user3.Name, repo3.Name, "topicName", token4) diff --git a/integrations/api_team_test.go b/integrations/api_team_test.go index d89385447092..0b77dc3be700 100644 --- a/integrations/api_team_test.go +++ b/integrations/api_team_test.go @@ -144,17 +144,21 @@ func TestAPITeamSearch(t *testing.T) { var results TeamSearchResults session := loginUser(t, user.Name) + csrf := GetCSRF(t, session, "/"+org.Name) req := NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s", org.Name, "_team") + req.Header.Add("X-Csrf-Token", csrf) resp := session.MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &results) assert.NotEmpty(t, results.Data) - assert.Equal(t, 1, len(results.Data)) + assert.Len(t, results.Data, 1) assert.Equal(t, "test_team", results.Data[0].Name) // no access if not organization member user5 := models.AssertExistsAndLoadBean(t, &models.User{ID: 5}).(*models.User) session = loginUser(t, user5.Name) + csrf = GetCSRF(t, session, "/"+org.Name) req = NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s", org.Name, "team") + 
req.Header.Add("X-Csrf-Token", csrf) resp = session.MakeRequest(t, req, http.StatusForbidden) } diff --git a/integrations/api_user_email_test.go b/integrations/api_user_email_test.go index 8d0a0cdf1b27..9d2b7485d852 100644 --- a/integrations/api_user_email_test.go +++ b/integrations/api_user_email_test.go @@ -33,7 +33,7 @@ func TestAPIListEmails(t *testing.T) { Primary: true, }, { - Email: "user21@example.com", + Email: "user2-2@example.com", Verified: false, Primary: false, }, @@ -55,7 +55,7 @@ func TestAPIAddEmail(t *testing.T) { session.MakeRequest(t, req, http.StatusUnprocessableEntity) opts = api.CreateEmailOption{ - Emails: []string{"user22@example.com"}, + Emails: []string{"user2-3@example.com"}, } req = NewRequestWithJSON(t, "POST", "/api/v1/user/emails?token="+token, &opts) resp := session.MakeRequest(t, req, http.StatusCreated) @@ -64,7 +64,7 @@ func TestAPIAddEmail(t *testing.T) { DecodeJSON(t, resp, &emails) assert.EqualValues(t, []*api.Email{ { - Email: "user22@example.com", + Email: "user2-3@example.com", Verified: true, Primary: false, }, @@ -79,13 +79,13 @@ func TestAPIDeleteEmail(t *testing.T) { token := getTokenForLoggedInUser(t, session) opts := api.DeleteEmailOption{ - Emails: []string{"user22@example.com"}, + Emails: []string{"user2-3@example.com"}, } req := NewRequestWithJSON(t, "DELETE", "/api/v1/user/emails?token="+token, &opts) session.MakeRequest(t, req, http.StatusNotFound) opts = api.DeleteEmailOption{ - Emails: []string{"user21@example.com"}, + Emails: []string{"user2-2@example.com"}, } req = NewRequestWithJSON(t, "DELETE", "/api/v1/user/emails?token="+token, &opts) session.MakeRequest(t, req, http.StatusNoContent) diff --git a/integrations/api_user_heatmap_test.go b/integrations/api_user_heatmap_test.go index 105d39e9ae2d..a0f0552a1794 100644 --- a/integrations/api_user_heatmap_test.go +++ b/integrations/api_user_heatmap_test.go @@ -26,7 +26,7 @@ func TestUserHeatmap(t *testing.T) { var heatmap []*models.UserHeatmapData DecodeJSON(t, resp, &heatmap) var dummyheatmap []*models.UserHeatmapData - dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603152000, Contributions: 1}) + dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603227600, Contributions: 1}) assert.Equal(t, dummyheatmap, heatmap) } diff --git a/integrations/api_user_search_test.go b/integrations/api_user_search_test.go index c5295fbba5da..f7349827e580 100644 --- a/integrations/api_user_search_test.go +++ b/integrations/api_user_search_test.go @@ -59,3 +59,34 @@ func TestAPIUserSearchNotLoggedIn(t *testing.T) { } } } + +func TestAPIUserSearchAdminLoggedInUserHidden(t *testing.T) { + defer prepareTestEnv(t)() + adminUsername := "user1" + session := loginUser(t, adminUsername) + token := getTokenForLoggedInUser(t, session) + query := "user31" + req := NewRequestf(t, "GET", "/api/v1/users/search?token=%s&q=%s", token, query) + req.SetBasicAuth(token, "x-oauth-basic") + resp := session.MakeRequest(t, req, http.StatusOK) + + var results SearchResults + DecodeJSON(t, resp, &results) + assert.NotEmpty(t, results.Data) + for _, user := range results.Data { + assert.Contains(t, user.UserName, query) + assert.NotEmpty(t, user.Email) + assert.EqualValues(t, "private", user.Visibility) + } +} + +func TestAPIUserSearchNotLoggedInUserHidden(t *testing.T) { + defer prepareTestEnv(t)() + query := "user31" + req := NewRequestf(t, "GET", "/api/v1/users/search?q=%s", query) + resp := MakeRequest(t, req, http.StatusOK) + + var results SearchResults + DecodeJSON(t, resp, 
&results) + assert.Empty(t, results.Data) +} diff --git a/integrations/compare_test.go b/integrations/compare_test.go new file mode 100644 index 000000000000..08468cbfd6e9 --- /dev/null +++ b/integrations/compare_test.go @@ -0,0 +1,24 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "net/http" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCompareTag(t *testing.T) { + defer prepareTestEnv(t)() + + session := loginUser(t, "user2") + req := NewRequest(t, "GET", "/user2/repo1/compare/v1.1...master") + resp := session.MakeRequest(t, req, http.StatusOK) + htmlDoc := NewHTMLParser(t, resp.Body) + selection := htmlDoc.doc.Find(".choose.branch .filter.dropdown") + // A dropdown for both base and head. + assert.Lenf(t, selection.Nodes, 2, "The template has changed") +} diff --git a/integrations/create_no_session_test.go b/integrations/create_no_session_test.go index c864b9c7ae12..46f111b6f7d8 100644 --- a/integrations/create_no_session_test.go +++ b/integrations/create_no_session_test.go @@ -14,7 +14,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/routers/routes" + "code.gitea.io/gitea/routers" "gitea.com/go-chi/session" jsoniter "github.com/json-iterator/go" @@ -58,7 +58,7 @@ func TestSessionFileCreation(t *testing.T) { oldSessionConfig := setting.SessionConfig.ProviderConfig defer func() { setting.SessionConfig.ProviderConfig = oldSessionConfig - c = routes.NormalRoutes() + c = routers.NormalRoutes() }() var config session.Options @@ -84,7 +84,7 @@ func TestSessionFileCreation(t *testing.T) { setting.SessionConfig.ProviderConfig = string(newConfigBytes) - c = routes.NormalRoutes() + c = routers.NormalRoutes() t.Run("NoSessionOnViewIssue", func(t *testing.T) { defer PrintCurrentTest(t)() diff --git a/integrations/download_test.go b/integrations/download_test.go index 305155e9ace4..38de75f476a9 100644 --- a/integrations/download_test.go +++ b/integrations/download_test.go @@ -8,6 +8,7 @@ import ( "net/http" "testing" + "code.gitea.io/gitea/modules/setting" "github.com/stretchr/testify/assert" ) @@ -62,3 +63,30 @@ func TestDownloadByIDMediaForSVGUsesSecureHeaders(t *testing.T) { assert.Equal(t, "image/svg+xml", resp.HeaderMap.Get("Content-Type")) assert.Equal(t, "nosniff", resp.HeaderMap.Get("X-Content-Type-Options")) } + +func TestDownloadRawTextFileWithoutMimeTypeMapping(t *testing.T) { + defer prepareTestEnv(t)() + + session := loginUser(t, "user2") + + req := NewRequest(t, "GET", "/user2/repo2/raw/branch/master/test.xml") + resp := session.MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, "text/plain; charset=utf-8", resp.HeaderMap.Get("Content-Type")) +} + +func TestDownloadRawTextFileWithMimeTypeMapping(t *testing.T) { + defer prepareTestEnv(t)() + setting.MimeTypeMap.Map[".xml"] = "text/xml" + setting.MimeTypeMap.Enabled = true + + session := loginUser(t, "user2") + + req := NewRequest(t, "GET", "/user2/repo2/raw/branch/master/test.xml") + resp := session.MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, "text/xml; charset=utf-8", resp.HeaderMap.Get("Content-Type")) + + delete(setting.MimeTypeMap.Map, ".xml") + setting.MimeTypeMap.Enabled = false +} diff --git a/integrations/git_smart_http_test.go b/integrations/git_smart_http_test.go new file mode 100644 index 000000000000..9a4e3689c1ce --- /dev/null +++ b/integrations/git_smart_http_test.go @@ 
-0,0 +1,69 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "io/ioutil" + "net/http" + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGitSmartHTTP(t *testing.T) { + onGiteaRun(t, testGitSmartHTTP) +} + +func testGitSmartHTTP(t *testing.T, u *url.URL) { + var kases = []struct { + p string + code int + }{ + { + p: "user2/repo1/info/refs", + code: 200, + }, + { + p: "user2/repo1/HEAD", + code: 200, + }, + { + p: "user2/repo1/objects/info/alternates", + code: 404, + }, + { + p: "user2/repo1/objects/info/http-alternates", + code: 404, + }, + { + p: "user2/repo1/../../custom/conf/app.ini", + code: 404, + }, + { + p: "user2/repo1/objects/info/../../../../custom/conf/app.ini", + code: 404, + }, + { + p: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`, + code: 400, + }, + } + + for _, kase := range kases { + t.Run(kase.p, func(t *testing.T) { + p := u.String() + kase.p + req, err := http.NewRequest("GET", p, nil) + assert.NoError(t, err) + req.SetBasicAuth("user2", userPassword) + resp, err := http.DefaultClient.Do(req) + assert.NoError(t, err) + defer resp.Body.Close() + assert.EqualValues(t, kase.code, resp.StatusCode) + _, err = ioutil.ReadAll(resp.Body) + assert.NoError(t, err) + }) + } +} diff --git a/integrations/git_test.go b/integrations/git_test.go index 13a60076a7e9..a9848eaa4c30 100644 --- a/integrations/git_test.go +++ b/integrations/git_test.go @@ -143,7 +143,7 @@ func standardCommitAndPushTest(t *testing.T, dstPath string) (little, big string func lfsCommitAndPushTest(t *testing.T, dstPath string) (littleLFS, bigLFS string) { t.Run("LFS", func(t *testing.T) { defer PrintCurrentTest(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return @@ -213,7 +213,7 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) assert.Equal(t, littleSize, resp.Length) - setting.CheckLFSVersion() + git.CheckLFSVersion() if setting.LFS.StartServer { req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS)) resp := session.MakeRequest(t, req, http.StatusOK) @@ -255,7 +255,7 @@ func mediaTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) assert.Equal(t, littleSize, resp.Length) - setting.CheckLFSVersion() + git.CheckLFSVersion() if setting.LFS.StartServer { req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", littleLFS)) resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/objects/10/32bbf17fbc0d9c95bb5418dabe8f8c99278700 b/integrations/gitea-repositories-meta/user2/repo2.git/objects/10/32bbf17fbc0d9c95bb5418dabe8f8c99278700 new file mode 100644 index 000000000000..736e40878edf --- /dev/null +++ b/integrations/gitea-repositories-meta/user2/repo2.git/objects/10/32bbf17fbc0d9c95bb5418dabe8f8c99278700 @@ -0,0 +1,2 @@ +xK +0Eg %":u􊕦J|p˭Q~% 9لG6G ͦw(E4}*{)`YƆleMJOܚ>%^ݿL!]N[v#E6U~/0 ZU'gpJ5 \ No newline at end of file diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/objects/26/f842bcad37fa40a1bb34cbb5ee219ee35d863d b/integrations/gitea-repositories-meta/user2/repo2.git/objects/26/f842bcad37fa40a1bb34cbb5ee219ee35d863d 
new file mode 100644 index 000000000000..c3e7e778c5bc Binary files /dev/null and b/integrations/gitea-repositories-meta/user2/repo2.git/objects/26/f842bcad37fa40a1bb34cbb5ee219ee35d863d differ diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/objects/ba/1aed4e2ea2443d76cec241b96be4ec990852ec b/integrations/gitea-repositories-meta/user2/repo2.git/objects/ba/1aed4e2ea2443d76cec241b96be4ec990852ec new file mode 100644 index 000000000000..add9a3af0d4c Binary files /dev/null and b/integrations/gitea-repositories-meta/user2/repo2.git/objects/ba/1aed4e2ea2443d76cec241b96be4ec990852ec differ diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/refs/heads/master b/integrations/gitea-repositories-meta/user2/repo2.git/refs/heads/master index 10967a9b8a0b..334d09ca0215 100644 --- a/integrations/gitea-repositories-meta/user2/repo2.git/refs/heads/master +++ b/integrations/gitea-repositories-meta/user2/repo2.git/refs/heads/master @@ -1 +1 @@ -205ac761f3326a7ebe416e8673760016450b5cec +1032bbf17fbc0d9c95bb5418dabe8f8c99278700 diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/refs/heads/test/archive b/integrations/gitea-repositories-meta/user27/repo49.git/refs/heads/test/archive new file mode 100644 index 000000000000..0f13243bfd64 --- /dev/null +++ b/integrations/gitea-repositories-meta/user27/repo49.git/refs/heads/test/archive @@ -0,0 +1 @@ +aacbdfe9e1c4b47f60abe81849045fa4e96f1d75 diff --git a/integrations/goget_test.go b/integrations/goget_test.go new file mode 100644 index 000000000000..1003d710238e --- /dev/null +++ b/integrations/goget_test.go @@ -0,0 +1,35 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "fmt" + "net/http" + "testing" + + "code.gitea.io/gitea/modules/setting" + "github.com/stretchr/testify/assert" +) + +func TestGoGet(t *testing.T) { + defer prepareTestEnv(t)() + + req := NewRequest(t, "GET", "/blah/glah/plah?go-get=1") + resp := MakeRequest(t, req, http.StatusOK) + + expected := fmt.Sprintf(` + + + + + + + go get --insecure %[1]s:%[2]s/blah/glah + + +`, setting.Domain, setting.HTTPPort, setting.AppURL) + + assert.Equal(t, expected, resp.Body.String()) +} diff --git a/integrations/integration_test.go b/integrations/integration_test.go index 74227416c4be..8a008ac62176 100644 --- a/integrations/integration_test.go +++ b/integrations/integration_test.go @@ -26,6 +26,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" @@ -34,7 +35,6 @@ import ( "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers" - "code.gitea.io/gitea/routers/routes" "github.com/PuerkitoBio/goquery" jsoniter "github.com/json-iterator/go" @@ -88,7 +88,7 @@ func TestMain(m *testing.M) { defer cancel() initIntegrationTest() - c = routes.NormalRoutes() + c = routers.NormalRoutes() // integration test settings... 
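The LFS tests in this area build pointers from raw content via lfs.GeneratePointer. A pointer is only the content's SHA-256 ("oid") plus its byte size; a stdlib-only sketch of that derivation, with the textual pointer format taken from the Git LFS v1 spec rather than from this patch:

	// Sketch of what an LFS pointer carries: the SHA-256 of the raw content
	// ("oid") and its size. lfs.GeneratePointer in the tests derives the same
	// two values; the version line follows the public Git LFS spec.
	package main

	import (
		"crypto/sha256"
		"encoding/hex"
		"fmt"
	)

	func pointerFor(content []byte) string {
		sum := sha256.Sum256(content)
		return fmt.Sprintf(
			"version https://git-lfs.github.com/spec/v1\noid sha256:%s\nsize %d\n",
			hex.EncodeToString(sum[:]), len(content))
	}

	func main() {
		// "dummy1" matches the payload storeObjectInRepo stores in the batch tests.
		fmt.Print(pointerFor([]byte("dummy1")))
	}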
if setting.Cfg != nil { @@ -163,7 +163,7 @@ func initIntegrationTest() { setting.SetCustomPathAndConf("", "", "") setting.NewContext() util.RemoveAll(models.LocalCopyPath()) - setting.CheckLFSVersion() + git.CheckLFSVersion() setting.InitDBConfig() if err := storage.Init(); err != nil { fmt.Printf("Init storage failed: %v", err) diff --git a/integrations/lfs_getobject_test.go b/integrations/lfs_getobject_test.go index 789c7572a77e..c99500f469ea 100644 --- a/integrations/lfs_getobject_test.go +++ b/integrations/lfs_getobject_test.go @@ -13,29 +13,21 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/routers/routes" + "code.gitea.io/gitea/routers/web" + jsoniter "github.com/json-iterator/go" gzipp "github.com/klauspost/compress/gzip" "github.com/stretchr/testify/assert" ) -var lfsID = int64(20000) - func storeObjectInRepo(t *testing.T, repositoryID int64, content *[]byte) string { pointer, err := lfs.GeneratePointer(bytes.NewReader(*content)) assert.NoError(t, err) - var lfsMetaObject *models.LFSMetaObject - if setting.Database.UsePostgreSQL { - lfsMetaObject = &models.LFSMetaObject{ID: lfsID, Pointer: pointer, RepositoryID: repositoryID} - } else { - lfsMetaObject = &models.LFSMetaObject{Pointer: pointer, RepositoryID: repositoryID} - } - - lfsID++ - lfsMetaObject, err = models.NewLFSMetaObject(lfsMetaObject) + _, err = models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: pointer, RepositoryID: repositoryID}) assert.NoError(t, err) contentStore := lfs.NewContentStore() exist, err := contentStore.Exists(pointer) @@ -90,7 +82,7 @@ func checkResponseTestContentEncoding(t *testing.T, content *[]byte, resp *httpt func TestGetLFSSmall(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return @@ -103,12 +95,12 @@ func TestGetLFSSmall(t *testing.T) { func TestGetLFSLarge(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return } - content := make([]byte, routes.GzipMinSize*10) + content := make([]byte, web.GzipMinSize*10) for i := range content { content[i] = byte(i % 256) } @@ -119,12 +111,12 @@ func TestGetLFSLarge(t *testing.T) { func TestGetLFSGzip(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return } - b := make([]byte, routes.GzipMinSize*10) + b := make([]byte, web.GzipMinSize*10) for i := range b { b[i] = byte(i % 256) } @@ -140,12 +132,12 @@ func TestGetLFSGzip(t *testing.T) { func TestGetLFSZip(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return } - b := make([]byte, routes.GzipMinSize*10) + b := make([]byte, web.GzipMinSize*10) for i := range b { b[i] = byte(i % 256) } @@ -163,7 +155,7 @@ func TestGetLFSZip(t *testing.T) { func TestGetLFSRangeNo(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return @@ -176,7 +168,7 @@ func TestGetLFSRangeNo(t *testing.T) { func TestGetLFSRange(t *testing.T) { defer prepareTestEnv(t)() - setting.CheckLFSVersion() + git.CheckLFSVersion() if !setting.LFS.StartServer { t.Skip() return @@ -210,7 +202,14 @@ func TestGetLFSRange(t *testing.T) { "Range": []string{tt.in}, } resp := storeAndGetLfs(t, &content, &h, 
tt.status) - assert.Equal(t, tt.out, resp.Body.String()) + if tt.status == http.StatusPartialContent || tt.status == http.StatusOK { + assert.Equal(t, tt.out, resp.Body.String()) + } else { + var er lfs.ErrorResponse + err := jsoniter.Unmarshal(resp.Body.Bytes(), &er) + assert.NoError(t, err) + assert.Equal(t, tt.out, er.Message) + } }) } } diff --git a/integrations/links_test.go b/integrations/links_test.go index 3b9c245fc37c..03229e10e122 100644 --- a/integrations/links_test.go +++ b/integrations/links_test.go @@ -35,6 +35,8 @@ func TestLinksNoLogin(t *testing.T) { "/user2/repo1", "/user2/repo1/projects", "/user2/repo1/projects/1", + "/assets/img/404.png", + "/assets/img/500.png", } for _, link := range links { diff --git a/integrations/migration-test/migration_test.go b/integrations/migration-test/migration_test.go index 852c0b737c2d..209ff5a058f4 100644 --- a/integrations/migration-test/migration_test.go +++ b/integrations/migration-test/migration_test.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/models/migrations" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -61,7 +62,7 @@ func initMigrationTest(t *testing.T) func() { assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) - setting.CheckLFSVersion() + git.CheckLFSVersion() setting.InitDBConfig() setting.NewLogServices(true) return deferFn diff --git a/services/mirror/mirror_test.go b/integrations/mirror_pull_test.go similarity index 89% rename from services/mirror/mirror_test.go rename to integrations/mirror_pull_test.go index 20492c784bdb..3908f355575d 100644 --- a/services/mirror/mirror_test.go +++ b/integrations/mirror_pull_test.go @@ -2,28 +2,24 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
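The renamed test below drives mirror_service.SyncPullMirror instead of the old runSync helper. Conceptually, a pull-mirror refresh is a remote update inside a mirror clone; a rough stdlib sketch of that idea, as a conceptual stand-in rather than the service's actual code path, with a placeholder repository path:

	// Conceptual sketch only: refresh a mirror clone, roughly the git-level
	// operation a pull-mirror sync performs. Path is a placeholder.
	package main

	import (
		"fmt"
		"os/exec"
	)

	func syncMirror(repoPath string) error {
		// --prune drops refs that disappeared upstream, keeping the mirror exact.
		cmd := exec.Command("git", "remote", "update", "--prune")
		cmd.Dir = repoPath
		out, err := cmd.CombinedOutput()
		if err != nil {
			return fmt.Errorf("mirror sync failed: %v: %s", err, out)
		}
		return nil
	}

	func main() {
		if err := syncMirror("/path/to/mirror.git"); err != nil {
			fmt.Println(err)
		}
	}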
-package mirror +package integrations import ( "context" - "path/filepath" "testing" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/git" migration "code.gitea.io/gitea/modules/migrations/base" "code.gitea.io/gitea/modules/repository" + mirror_service "code.gitea.io/gitea/services/mirror" release_service "code.gitea.io/gitea/services/release" "github.com/stretchr/testify/assert" ) -func TestMain(m *testing.M) { - models.MainTest(m, filepath.Join("..", "..")) -} - -func TestRelease_MirrorDelete(t *testing.T) { - assert.NoError(t, models.PrepareTestDatabase()) +func TestMirrorPull(t *testing.T) { + defer prepareTestEnv(t)() user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) @@ -63,7 +59,9 @@ func TestRelease_MirrorDelete(t *testing.T) { assert.NoError(t, release_service.CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.2", Target: "master", Title: "v0.2 is released", @@ -76,7 +74,7 @@ func TestRelease_MirrorDelete(t *testing.T) { err = mirror.GetMirror() assert.NoError(t, err) - _, ok := runSync(ctx, mirror.Mirror) + ok := mirror_service.SyncPullMirror(ctx, mirror.ID) assert.True(t, ok) count, err := models.GetReleaseCountByRepoID(mirror.ID, findOptions) @@ -87,7 +85,7 @@ func TestRelease_MirrorDelete(t *testing.T) { assert.NoError(t, err) assert.NoError(t, release_service.DeleteReleaseByID(release.ID, user, true)) - _, ok = runSync(ctx, mirror.Mirror) + ok = mirror_service.SyncPullMirror(ctx, mirror.ID) assert.True(t, ok) count, err = models.GetReleaseCountByRepoID(mirror.ID, findOptions) diff --git a/integrations/mirror_push_test.go b/integrations/mirror_push_test.go new file mode 100644 index 000000000000..3191ef770444 --- /dev/null +++ b/integrations/mirror_push_test.go @@ -0,0 +1,86 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package integrations + +import ( + "context" + "fmt" + "net/http" + "net/url" + "testing" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + mirror_service "code.gitea.io/gitea/services/mirror" + + "github.com/stretchr/testify/assert" +) + +func TestMirrorPush(t *testing.T) { + onGiteaRun(t, testMirrorPush) +} + +func testMirrorPush(t *testing.T, u *url.URL) { + defer prepareTestEnv(t)() + + setting.Migrations.AllowLocalNetworks = true + + user := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + srcRepo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + mirrorRepo, err := repository.CreateRepository(user, user, models.CreateRepoOptions{ + Name: "test-push-mirror", + }) + assert.NoError(t, err) + + ctx := NewAPITestContext(t, user.LowerName, srcRepo.Name) + + doCreatePushMirror(ctx, fmt.Sprintf("%s%s/%s", u.String(), url.PathEscape(ctx.Username), url.PathEscape(mirrorRepo.Name)), user.LowerName, userPassword)(t) + + mirrors, err := models.GetPushMirrorsByRepoID(srcRepo.ID) + assert.NoError(t, err) + assert.Len(t, mirrors, 1) + + ok := mirror_service.SyncPushMirror(context.Background(), mirrors[0].ID) + assert.True(t, ok) + + srcGitRepo, err := git.OpenRepository(srcRepo.RepoPath()) + assert.NoError(t, err) + defer srcGitRepo.Close() + + srcCommit, err := srcGitRepo.GetBranchCommit("master") + assert.NoError(t, err) + + mirrorGitRepo, err := git.OpenRepository(mirrorRepo.RepoPath()) + assert.NoError(t, err) + defer mirrorGitRepo.Close() + + mirrorCommit, err := mirrorGitRepo.GetBranchCommit("master") + assert.NoError(t, err) + + assert.Equal(t, srcCommit.ID, mirrorCommit.ID) +} + +func doCreatePushMirror(ctx APITestContext, address, username, password string) func(t *testing.T) { + return func(t *testing.T) { + csrf := GetCSRF(t, ctx.Session, fmt.Sprintf("/%s/%s/settings", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame))) + + req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/settings", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame)), map[string]string{ + "_csrf": csrf, + "action": "push-mirror-add", + "push_mirror_address": address, + "push_mirror_username": username, + "push_mirror_password": password, + "push_mirror_interval": "0", + }) + ctx.Session.MakeRequest(t, req, http.StatusFound) + + flashCookie := ctx.Session.GetCookie("macaron_flash") + assert.NotNil(t, flashCookie) + assert.Contains(t, flashCookie.Value, "success") + } +} diff --git a/integrations/mssql.ini.tmpl b/integrations/mssql.ini.tmpl index 1867070ff5ef..0a7710fc5fe1 100644 --- a/integrations/mssql.ini.tmpl +++ b/integrations/mssql.ini.tmpl @@ -83,6 +83,7 @@ MODE = test,file ROOT_PATH = mssql-log ROUTER = , XORM = file +ENABLE_SSH_LOG = true [log.test] LEVEL = Info diff --git a/integrations/mysql.ini.tmpl b/integrations/mysql.ini.tmpl index 176992cb26d0..a78b0425a198 100644 --- a/integrations/mysql.ini.tmpl +++ b/integrations/mysql.ini.tmpl @@ -101,6 +101,7 @@ MODE = test,file ROOT_PATH = mysql-log ROUTER = , XORM = file +ENABLE_SSH_LOG = true [log.test] LEVEL = Info diff --git a/integrations/mysql8.ini.tmpl b/integrations/mysql8.ini.tmpl index 7c5bcb58dc5f..1151b6abc26a 100644 --- a/integrations/mysql8.ini.tmpl +++ b/integrations/mysql8.ini.tmpl @@ -80,6 +80,7 @@ MODE = test,file ROOT_PATH = mysql8-log ROUTER = , XORM = file +ENABLE_SSH_LOG = true [log.test] LEVEL = Info diff --git a/integrations/org_count_test.go 
b/integrations/org_count_test.go index 755ee3cee59f..20917dc17e0c 100644 --- a/integrations/org_count_test.go +++ b/integrations/org_count_test.go @@ -114,11 +114,12 @@ func doCheckOrgCounts(username string, orgCounts map[string]int, strict bool, ca Name: username, }).(*models.User) - user.GetOrganizations(&models.SearchOrganizationsOptions{All: true}) + orgs, err := models.GetOrgsByUserID(user.ID, true) + assert.NoError(t, err) calcOrgCounts := map[string]int{} - for _, org := range user.Orgs { + for _, org := range orgs { calcOrgCounts[org.LowerName] = org.NumRepos count, ok := canonicalCounts[org.LowerName] if ok { diff --git a/integrations/org_test.go b/integrations/org_test.go index b0212f4af7a7..ee61aae6f5ac 100644 --- a/integrations/org_test.go +++ b/integrations/org_test.go @@ -33,7 +33,7 @@ func TestOrgRepos(t *testing.T) { htmlDoc := NewHTMLParser(t, resp.Body) sel := htmlDoc.doc.Find("a.name") - assert.EqualValues(t, len(repos), len(sel.Nodes)) + assert.Len(t, repos, len(sel.Nodes)) for i := 0; i < len(repos); i++ { assert.EqualValues(t, repos[i], strings.TrimSpace(sel.Eq(i).Text())) } diff --git a/integrations/pgsql.ini.tmpl b/integrations/pgsql.ini.tmpl index 3a4a5e6c4fe5..f11d4faba5d9 100644 --- a/integrations/pgsql.ini.tmpl +++ b/integrations/pgsql.ini.tmpl @@ -84,6 +84,7 @@ MODE = test,file ROOT_PATH = pgsql-log ROUTER = , XORM = file +ENABLE_SSH_LOG = true [log.test] LEVEL = Info diff --git a/integrations/pull_merge_test.go b/integrations/pull_merge_test.go index 79f62c32c10f..b4a3397a45f5 100644 --- a/integrations/pull_merge_test.go +++ b/integrations/pull_merge_test.go @@ -197,10 +197,8 @@ func TestCantMergeWorkInProgress(t *testing.T) { text := strings.TrimSpace(htmlDoc.doc.Find(".merge-section > .item").Last().Text()) assert.NotEmpty(t, text, "Can't find WIP text") - // remove from lang - expected := i18n.Tr("en", "repo.pulls.cannot_merge_work_in_progress", "[wip]") - replacer := strings.NewReplacer("", "", "", "") - assert.Equal(t, replacer.Replace(expected), text, "Unable to find WIP text") + assert.Contains(t, text, i18n.Tr("en", "repo.pulls.cannot_merge_work_in_progress"), "Unable to find WIP text") + assert.Contains(t, text, "[wip]", "Unable to find WIP text") }) } diff --git a/integrations/release_test.go b/integrations/release_test.go index a14ad8434e86..4458387ef75d 100644 --- a/integrations/release_test.go +++ b/integrations/release_test.go @@ -10,9 +10,11 @@ import ( "testing" "time" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" + "github.com/PuerkitoBio/goquery" "github.com/stretchr/testify/assert" "github.com/unknwon/i18n" ) @@ -83,7 +85,7 @@ func TestCreateRelease(t *testing.T) { session := loginUser(t, "user2") createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", false, false) - checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.stable"), 2) + checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.stable"), 4) } func TestCreateReleasePreRelease(t *testing.T) { @@ -92,7 +94,7 @@ func TestCreateReleasePreRelease(t *testing.T) { session := loginUser(t, "user2") createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", true, false) - checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.prerelease"), 2) + checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.prerelease"), 4) } func TestCreateReleaseDraft(t *testing.T) { @@ 
-101,7 +103,7 @@ func TestCreateReleaseDraft(t *testing.T) { session := loginUser(t, "user2") createNewRelease(t, session, "/user2/repo1", "v0.0.1", "v0.0.1", false, true) - checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.draft"), 2) + checkLatestReleaseAndCount(t, session, "/user2/repo1", "v0.0.1", i18n.Tr("en", "repo.release.draft"), 4) } func TestCreateReleasePaging(t *testing.T) { @@ -114,7 +116,7 @@ func TestCreateReleasePaging(t *testing.T) { setting.API.DefaultPagingNum = 10 session := loginUser(t, "user2") - // Create enaugh releases to have paging + // Create enough releases to have paging for i := 0; i < 12; i++ { version := fmt.Sprintf("v0.0.%d", i) createNewRelease(t, session, "/user2/repo1", version, version, false, false) @@ -127,3 +129,83 @@ func TestCreateReleasePaging(t *testing.T) { session2 := loginUser(t, "user4") checkLatestReleaseAndCount(t, session2, "/user2/repo1", "v0.0.11", i18n.Tr("en", "repo.release.stable"), 10) } + +func TestViewReleaseListNoLogin(t *testing.T) { + defer prepareTestEnv(t)() + + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + link := repo.Link() + "/releases" + + req := NewRequest(t, "GET", link) + rsp := MakeRequest(t, req, http.StatusOK) + + htmlDoc := NewHTMLParser(t, rsp.Body) + releases := htmlDoc.Find("#release-list li.ui.grid") + assert.Equal(t, 2, releases.Length()) + + links := make([]string, 0, 5) + releases.Each(func(i int, s *goquery.Selection) { + link, exist := s.Find(".release-list-title a").Attr("href") + if !exist { + return + } + links = append(links, link) + }) + + assert.EqualValues(t, []string{"/user2/repo1/releases/tag/v1.0", "/user2/repo1/releases/tag/v1.1"}, links) +} + +func TestViewReleaseListLogin(t *testing.T) { + defer prepareTestEnv(t)() + + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + link := repo.Link() + "/releases" + + session := loginUser(t, "user1") + req := NewRequest(t, "GET", link) + rsp := session.MakeRequest(t, req, http.StatusOK) + + htmlDoc := NewHTMLParser(t, rsp.Body) + releases := htmlDoc.Find("#release-list li.ui.grid") + assert.Equal(t, 3, releases.Length()) + + links := make([]string, 0, 5) + releases.Each(func(i int, s *goquery.Selection) { + link, exist := s.Find(".release-list-title a").Attr("href") + if !exist { + return + } + links = append(links, link) + }) + + assert.EqualValues(t, []string{ + "/user2/repo1/releases/tag/draft-release", + "/user2/repo1/releases/tag/v1.0", + "/user2/repo1/releases/tag/v1.1", + }, links) +} + +func TestViewTagsList(t *testing.T) { + defer prepareTestEnv(t)() + + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + + link := repo.Link() + "/tags" + + session := loginUser(t, "user1") + req := NewRequest(t, "GET", link) + rsp := session.MakeRequest(t, req, http.StatusOK) + + htmlDoc := NewHTMLParser(t, rsp.Body) + tags := htmlDoc.Find(".tag-list tr") + assert.Equal(t, 3, tags.Length()) + + tagNames := make([]string, 0, 5) + tags.Each(func(i int, s *goquery.Selection) { + tagNames = append(tagNames, s.Find(".tag a.df.ac").Text()) + }) + + assert.EqualValues(t, []string{"v1.0", "delete-tag", "v1.1"}, tagNames) +} diff --git a/integrations/repo_branch_test.go b/integrations/repo_branch_test.go index de4e66898797..af5c475ea7dc 100644 --- a/integrations/repo_branch_test.go +++ b/integrations/repo_branch_test.go @@ -11,6 +11,7 @@ import ( "strings" "testing" + 
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" "github.com/stretchr/testify/assert" @@ -134,5 +135,13 @@ func TestCreateBranchInvalidCSRF(t *testing.T) { "_csrf": "fake_csrf", "new_branch_name": "test", }) - session.MakeRequest(t, req, http.StatusBadRequest) + resp := session.MakeRequest(t, req, http.StatusFound) + loc := resp.Header().Get("Location") + assert.Equal(t, setting.AppSubURL+"/", loc) + resp = session.MakeRequest(t, NewRequest(t, "GET", loc), http.StatusOK) + htmlDoc := NewHTMLParser(t, resp.Body) + assert.Equal(t, + "Bad Request: Invalid CSRF token", + strings.TrimSpace(htmlDoc.doc.Find(".ui.message").Text()), + ) } diff --git a/integrations/repo_commits_test.go b/integrations/repo_commits_test.go index 042849db7cf5..8dcbcd368350 100644 --- a/integrations/repo_commits_test.go +++ b/integrations/repo_commits_test.go @@ -66,33 +66,43 @@ func doTestRepoCommitWithStatus(t *testing.T, state string, classes ...string) { doc = NewHTMLParser(t, resp.Body) // Check if commit status is displayed in message column sel := doc.doc.Find("#commits-table tbody tr td.message a.commit-statuses-trigger i.commit-status") - assert.Equal(t, sel.Length(), 1) + assert.Equal(t, 1, sel.Length()) for _, class := range classes { assert.True(t, sel.HasClass(class)) } //By SHA req = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/"+path.Base(commitURL)+"/statuses") - testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), state) + reqOne := NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/"+path.Base(commitURL)+"/status") + testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), session.MakeRequest(t, reqOne, http.StatusOK), state) + //By Ref req = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/master/statuses") - testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), state) + reqOne = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/master/status") + testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), session.MakeRequest(t, reqOne, http.StatusOK), state) req = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/v1.1/statuses") - testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), state) + reqOne = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/commits/v1.1/status") + testRepoCommitsWithStatus(t, session.MakeRequest(t, req, http.StatusOK), session.MakeRequest(t, reqOne, http.StatusOK), state) } -func testRepoCommitsWithStatus(t *testing.T, resp *httptest.ResponseRecorder, state string) { +func testRepoCommitsWithStatus(t *testing.T, resp, respOne *httptest.ResponseRecorder, state string) { json := jsoniter.ConfigCompatibleWithStandardLibrary - decoder := json.NewDecoder(resp.Body) - statuses := []*api.CommitStatus{} - assert.NoError(t, decoder.Decode(&statuses)) - assert.Len(t, statuses, 1) - for _, s := range statuses { - assert.Equal(t, api.CommitStatusState(state), s.State) - assert.Equal(t, setting.AppURL+"api/v1/repos/user2/repo1/statuses/65f1bf27bc3bf70f64657658635e66094edbcb4d", s.URL) - assert.Equal(t, "http://test.ci/", s.TargetURL) - assert.Equal(t, "", s.Description) - assert.Equal(t, "testci", s.Context) + var statuses []*api.CommitStatus + assert.NoError(t, json.Unmarshal(resp.Body.Bytes(), &statuses)) + var status api.CombinedStatus + assert.NoError(t, json.Unmarshal(respOne.Body.Bytes(), &status)) + assert.NotNil(t, status) + + if assert.Len(t, statuses, 1) { + assert.Equal(t, api.CommitStatusState(state), statuses[0].State) + 
assert.Equal(t, setting.AppURL+"api/v1/repos/user2/repo1/statuses/65f1bf27bc3bf70f64657658635e66094edbcb4d", statuses[0].URL) + assert.Equal(t, "http://test.ci/", statuses[0].TargetURL) + assert.Equal(t, "", statuses[0].Description) + assert.Equal(t, "testci", statuses[0].Context) + + assert.Len(t, status.Statuses, 1) + assert.Equal(t, statuses[0], status.Statuses[0]) + assert.Equal(t, "65f1bf27bc3bf70f64657658635e66094edbcb4d", status.SHA) } } diff --git a/integrations/repo_tag_test.go b/integrations/repo_tag_test.go new file mode 100644 index 000000000000..eb3f2b47fb9e --- /dev/null +++ b/integrations/repo_tag_test.go @@ -0,0 +1,74 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "io/ioutil" + "net/url" + "testing" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/services/release" + + "github.com/stretchr/testify/assert" +) + +func TestCreateNewTagProtected(t *testing.T) { + defer prepareTestEnv(t)() + + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) + owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User) + + t.Run("API", func(t *testing.T) { + defer PrintCurrentTest(t)() + + err := release.CreateNewTag(owner, repo, "master", "v-1", "first tag") + assert.NoError(t, err) + + err = models.InsertProtectedTag(&models.ProtectedTag{ + RepoID: repo.ID, + NamePattern: "v-*", + }) + assert.NoError(t, err) + err = models.InsertProtectedTag(&models.ProtectedTag{ + RepoID: repo.ID, + NamePattern: "v-1.1", + AllowlistUserIDs: []int64{repo.OwnerID}, + }) + assert.NoError(t, err) + + err = release.CreateNewTag(owner, repo, "master", "v-2", "second tag") + assert.Error(t, err) + assert.True(t, models.IsErrProtectedTagName(err)) + + err = release.CreateNewTag(owner, repo, "master", "v-1.1", "third tag") + assert.NoError(t, err) + }) + + t.Run("Git", func(t *testing.T) { + onGiteaRun(t, func(t *testing.T, u *url.URL) { + username := "user2" + httpContext := NewAPITestContext(t, username, "repo1") + + dstPath, err := ioutil.TempDir("", httpContext.Reponame) + assert.NoError(t, err) + defer util.RemoveAll(dstPath) + + u.Path = httpContext.GitPath() + u.User = url.UserPassword(username, userPassword) + + doGitClone(dstPath, u)(t) + + _, err = git.NewCommand("tag", "v-2").RunInDir(dstPath) + assert.NoError(t, err) + + _, err = git.NewCommand("push", "--tags").RunInDir(dstPath) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Tag v-2 is protected") + }) + }) +} diff --git a/integrations/repo_test.go b/integrations/repo_test.go index c1652aeb1d32..8c4cdf5a969f 100644 --- a/integrations/repo_test.go +++ b/integrations/repo_test.go @@ -154,12 +154,12 @@ func TestViewRepoWithSymlinks(t *testing.T) { file := strings.Trim(s.Find("A").Text(), " \t\n") return fmt.Sprintf("%s: %s", file, cls) }) - assert.Equal(t, len(items), 5) - assert.Equal(t, items[0], "a: svg octicon-file-directory") - assert.Equal(t, items[1], "link_b: svg octicon-file-submodule") - assert.Equal(t, items[2], "link_d: svg octicon-file-symlink-file") - assert.Equal(t, items[3], "link_hi: svg octicon-file-symlink-file") - assert.Equal(t, items[4], "link_link: svg octicon-file-symlink-file") + assert.Len(t, items, 5) + assert.Equal(t, "a: svg octicon-file-directory", items[0]) + assert.Equal(t, "link_b: svg octicon-file-submodule", 
items[1]) + assert.Equal(t, "link_d: svg octicon-file-symlink-file", items[2]) + assert.Equal(t, "link_hi: svg octicon-file-symlink-file", items[3]) + assert.Equal(t, "link_link: svg octicon-file-symlink-file", items[4]) } // TestViewAsRepoAdmin tests PR #2167 diff --git a/integrations/sqlite.ini.tmpl b/integrations/sqlite.ini.tmpl index 4a796e931787..71ac39a44baf 100644 --- a/integrations/sqlite.ini.tmpl +++ b/integrations/sqlite.ini.tmpl @@ -79,6 +79,7 @@ MODE = test,file ROOT_PATH = sqlite-log ROUTER = , XORM = file +ENABLE_SSH_LOG = true [log.test] LEVEL = Info diff --git a/integrations/user_avatar_test.go b/integrations/user_avatar_test.go new file mode 100644 index 000000000000..1a3a851281e9 --- /dev/null +++ b/integrations/user_avatar_test.go @@ -0,0 +1,87 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "bytes" + "image/png" + "io" + "mime/multipart" + "net/http" + "net/url" + "strings" + "testing" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/avatar" + "github.com/stretchr/testify/assert" +) + +func TestUserAvatar(t *testing.T) { + onGiteaRun(t, func(t *testing.T, u *url.URL) { + user2 := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) // owner of the repo3, is an org + + seed := user2.Email + if len(seed) == 0 { + seed = user2.Name + } + + img, err := avatar.RandomImage([]byte(seed)) + if err != nil { + assert.NoError(t, err) + return + } + + session := loginUser(t, "user2") + csrf := GetCSRF(t, session, "/user/settings") + + imgData := &bytes.Buffer{} + + body := &bytes.Buffer{} + + //Setup multi-part + writer := multipart.NewWriter(body) + writer.WriteField("source", "local") + part, err := writer.CreateFormFile("avatar", "avatar-for-testuseravatar.png") + if err != nil { + assert.NoError(t, err) + return + } + + if err := png.Encode(imgData, img); err != nil { + assert.NoError(t, err) + return + } + + if _, err := io.Copy(part, imgData); err != nil { + assert.NoError(t, err) + return + } + + if err := writer.Close(); err != nil { + assert.NoError(t, err) + return + } + + req := NewRequestWithBody(t, "POST", "/user/settings/avatar", body) + req.Header.Add("X-Csrf-Token", csrf) + req.Header.Add("Content-Type", writer.FormDataContentType()) + + session.MakeRequest(t, req, http.StatusFound) + + user2 = models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) // owner of the repo3, is an org + + req = NewRequest(t, "GET", user2.AvatarLink()) + resp := session.MakeRequest(t, req, http.StatusFound) + location := resp.Header().Get("Location") + if !strings.HasPrefix(location, "/avatars") { + assert.Fail(t, "Avatar location is not local: %s", location) + } + req = NewRequest(t, "GET", location) + session.MakeRequest(t, req, http.StatusOK) + + // Can't test if the response matches because the image is regened on upload but checking that this at least doesn't give a 404 should be enough. 
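+		// A stricter follow-up could also decode the served bytes to confirm a
+		// valid image came back; a sketch (not part of this change), reusing the
+		// req, resp and err variables above and the image/png import in this file:
+		//
+		//	resp = session.MakeRequest(t, req, http.StatusOK)
+		//	_, err = png.Decode(resp.Body)
+		//	assert.NoError(t, err)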
+ }) +} diff --git a/jest.config.js b/jest.config.js index 317b64d7c134..f95ef1c39cab 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,8 +1,9 @@ export default { setupFilesAfterEnv: ['jest-extended'], testTimeout: 20000, + rootDir: 'web_src', testMatch: [ - '**/web_src/**/*.test.js', + '/**/*.test.js', ], transform: {}, verbose: false, diff --git a/models/action.go b/models/action.go index 9cd78dbfa2d4..ed023e5e154a 100644 --- a/models/action.go +++ b/models/action.go @@ -26,31 +26,32 @@ type ActionType int // Possible action types. const ( - ActionCreateRepo ActionType = iota + 1 // 1 - ActionRenameRepo // 2 - ActionStarRepo // 3 - ActionWatchRepo // 4 - ActionCommitRepo // 5 - ActionCreateIssue // 6 - ActionCreatePullRequest // 7 - ActionTransferRepo // 8 - ActionPushTag // 9 - ActionCommentIssue // 10 - ActionMergePullRequest // 11 - ActionCloseIssue // 12 - ActionReopenIssue // 13 - ActionClosePullRequest // 14 - ActionReopenPullRequest // 15 - ActionDeleteTag // 16 - ActionDeleteBranch // 17 - ActionMirrorSyncPush // 18 - ActionMirrorSyncCreate // 19 - ActionMirrorSyncDelete // 20 - ActionApprovePullRequest // 21 - ActionRejectPullRequest // 22 - ActionCommentPull // 23 - ActionPublishRelease // 24 - ActionPullReviewDismissed // 25 + ActionCreateRepo ActionType = iota + 1 // 1 + ActionRenameRepo // 2 + ActionStarRepo // 3 + ActionWatchRepo // 4 + ActionCommitRepo // 5 + ActionCreateIssue // 6 + ActionCreatePullRequest // 7 + ActionTransferRepo // 8 + ActionPushTag // 9 + ActionCommentIssue // 10 + ActionMergePullRequest // 11 + ActionCloseIssue // 12 + ActionReopenIssue // 13 + ActionClosePullRequest // 14 + ActionReopenPullRequest // 15 + ActionDeleteTag // 16 + ActionDeleteBranch // 17 + ActionMirrorSyncPush // 18 + ActionMirrorSyncCreate // 19 + ActionMirrorSyncDelete // 20 + ActionApprovePullRequest // 21 + ActionRejectPullRequest // 22 + ActionCommentPull // 23 + ActionPublishRelease // 24 + ActionPullReviewDismissed // 25 + ActionPullRequestReadyForReview // 26 ) // Action represents user operation type and other information to diff --git a/models/admin.go b/models/admin.go index 7911ce75112c..3a784d66964a 100644 --- a/models/admin.go +++ b/models/admin.go @@ -114,6 +114,11 @@ func DeleteNotice(id int64) error { // DeleteNotices deletes all notices with ID from start to end (inclusive). 
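// With the fast path added below, calling DeleteNotices(0, 0) removes every
// notice in a single statement instead of building a ranged query. A caller
// sketch (hypothetical admin task, not part of this diff), using only the
// models API shown here:
//
//	// wipe all system notices
//	if err := models.DeleteNotices(0, 0); err != nil {
//		log.Error("DeleteNotices: %v", err)
//	}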
func DeleteNotices(start, end int64) error { + if start == 0 && end == 0 { + _, err := x.Exec("DELETE FROM notice") + return err + } + sess := x.Where("id >= ?", start) if end > 0 { sess.And("id <= ?", end) diff --git a/models/attachment_test.go b/models/attachment_test.go index fa7fd3471bab..700b7c09dbb1 100644 --- a/models/attachment_test.go +++ b/models/attachment_test.go @@ -61,11 +61,11 @@ func TestGetByCommentOrIssueID(t *testing.T) { // count of attachments from issue ID attachments, err := GetAttachmentsByIssueID(1) assert.NoError(t, err) - assert.Equal(t, 1, len(attachments)) + assert.Len(t, attachments, 1) attachments, err = GetAttachmentsByCommentID(1) assert.NoError(t, err) - assert.Equal(t, 2, len(attachments)) + assert.Len(t, attachments, 2) } func TestDeleteAttachments(t *testing.T) { @@ -122,7 +122,7 @@ func TestGetAttachmentsByUUIDs(t *testing.T) { attachList, err := GetAttachmentsByUUIDs(DefaultDBContext(), []string{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a17", "not-existing-uuid"}) assert.NoError(t, err) - assert.Equal(t, 2, len(attachList)) + assert.Len(t, attachList, 2) assert.Equal(t, "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", attachList[0].UUID) assert.Equal(t, "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a17", attachList[1].UUID) assert.Equal(t, int64(1), attachList[0].IssueID) diff --git a/models/avatar.go b/models/avatar.go index 4f02fe508981..b4c078f8cf9f 100644 --- a/models/avatar.go +++ b/models/avatar.go @@ -44,7 +44,7 @@ const DefaultAvatarSize = -1 const DefaultAvatarPixelSize = 28 // AvatarRenderedSizeFactor is the factor by which the default size is increased for finer rendering -const AvatarRenderedSizeFactor = 2 +const AvatarRenderedSizeFactor = 4 // HashEmail hashes email address to MD5 string. 
// https://en.gravatar.com/site/implement/hash/ diff --git a/models/avatar_test.go b/models/avatar_test.go index bf99897d8812..09f1a8066da5 100644 --- a/models/avatar_test.go +++ b/models/avatar_test.go @@ -40,8 +40,10 @@ func TestHashEmail(t *testing.T) { } func TestSizedAvatarLink(t *testing.T) { + setting.AppSubURL = "/testsuburl" + disableGravatar() - assert.Equal(t, "/suburl/assets/img/avatar_default.png", + assert.Equal(t, "/testsuburl/assets/img/avatar_default.png", SizedAvatarLink("gitea@example.com", 100)) enableGravatar(t) diff --git a/models/branches.go b/models/branches.go index 1ac1fa49e580..e13d84ee0524 100644 --- a/models/branches.go +++ b/models/branches.go @@ -219,7 +219,7 @@ func (protectBranch *ProtectedBranch) GetProtectedFilePatterns() []glob.Glob { expr = strings.TrimSpace(expr) if expr != "" { if g, err := glob.Compile(expr, '.', '/'); err != nil { - log.Info("Invalid glob expresion '%s' (skipped): %v", expr, err) + log.Info("Invalid glob expression '%s' (skipped): %v", expr, err) } else { extarr = append(extarr, g) } @@ -362,11 +362,7 @@ func (repo *Repository) GetBranchProtection(branchName string) (*ProtectedBranch } // IsProtectedBranch checks if branch is protected -func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool, error) { - if doer == nil { - return true, nil - } - +func (repo *Repository) IsProtectedBranch(branchName string) (bool, error) { protectedBranch := &ProtectedBranch{ RepoID: repo.ID, BranchName: branchName, @@ -379,27 +375,6 @@ func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool, return has, nil } -// IsProtectedBranchForPush checks if branch is protected for push -func (repo *Repository) IsProtectedBranchForPush(branchName string, doer *User) (bool, error) { - if doer == nil { - return true, nil - } - - protectedBranch := &ProtectedBranch{ - RepoID: repo.ID, - BranchName: branchName, - } - - has, err := x.Get(protectedBranch) - if err != nil { - return true, err - } else if has { - return !protectedBranch.CanUserPush(doer.ID), nil - } - - return false, nil -} - // updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with // the users from newWhitelist which have explicit read or write access to the repo. func updateApprovalWhitelist(repo *Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { diff --git a/models/consistency.go b/models/consistency.go index 77a8018266e5..f037b0515704 100644 --- a/models/consistency.go +++ b/models/consistency.go @@ -141,6 +141,12 @@ func (milestone *Milestone) checkForConsistency(t *testing.T) { actual := getCount(t, x.Where("is_closed=?", true), &Issue{MilestoneID: milestone.ID}) assert.EqualValues(t, milestone.NumClosedIssues, actual, "Unexpected number of closed issues for milestone %+v", milestone) + + completeness := 0 + if milestone.NumIssues > 0 { + completeness = milestone.NumClosedIssues * 100 / milestone.NumIssues + } + assert.Equal(t, completeness, milestone.Completeness) } func (label *Label) checkForConsistency(t *testing.T) { @@ -180,17 +186,21 @@ func CountOrphanedLabels() (int64, error) { } norepo, err := x.Table("label"). - Join("LEFT", "repository", "label.repo_id=repository.id"). - Where(builder.IsNull{"repository.id"}).And(builder.Gt{"label.repo_id": 0}). - Count("id") + Where(builder.And( + builder.Gt{"repo_id": 0}, + builder.NotIn("repo_id", builder.Select("id").From("repository")), + )). + Count() if err != nil { return 0, err } noorg, err := x.Table("label"). 
- Join("LEFT", "`user`", "label.org_id=`user`.id"). - Where(builder.IsNull{"`user`.id"}).And(builder.Gt{"label.org_id": 0}). - Count("id") + Where(builder.And( + builder.Gt{"org_id": 0}, + builder.NotIn("org_id", builder.Select("id").From("user")), + )). + Count() if err != nil { return 0, err } @@ -206,17 +216,21 @@ func DeleteOrphanedLabels() error { } // delete labels with none existing repos - if _, err := x.In("id", builder.Select("label.id").From("label"). - Join("LEFT", "repository", "label.repo_id=repository.id"). - Where(builder.IsNull{"repository.id"}).And(builder.Gt{"label.repo_id": 0})). + if _, err := x. + Where(builder.And( + builder.Gt{"repo_id": 0}, + builder.NotIn("repo_id", builder.Select("id").From("repository")), + )). Delete(Label{}); err != nil { return err } // delete labels with none existing orgs - if _, err := x.In("id", builder.Select("label.id").From("label"). - Join("LEFT", "`user`", "label.org_id=`user`.id"). - Where(builder.IsNull{"`user`.id"}).And(builder.Gt{"label.org_id": 0})). + if _, err := x. + Where(builder.And( + builder.Gt{"org_id": 0}, + builder.NotIn("org_id", builder.Select("id").From("user")), + )). Delete(Label{}); err != nil { return err } @@ -227,15 +241,14 @@ func DeleteOrphanedLabels() error { // CountOrphanedIssueLabels return count of IssueLabels witch have no label behind anymore func CountOrphanedIssueLabels() (int64, error) { return x.Table("issue_label"). - Join("LEFT", "label", "issue_label.label_id = label.id"). - Where(builder.IsNull{"label.id"}).Count() + NotIn("label_id", builder.Select("id").From("label")). + Count() } // DeleteOrphanedIssueLabels delete IssueLabels witch have no label behind anymore func DeleteOrphanedIssueLabels() error { - _, err := x.In("id", builder.Select("issue_label.id").From("issue_label"). - Join("LEFT", "label", "issue_label.label_id = label.id"). - Where(builder.IsNull{"label.id"})). + _, err := x. + NotIn("label_id", builder.Select("id").From("label")). 
Delete(IssueLabel{}) return err diff --git a/models/context.go b/models/context.go index 4fbd3b6699ee..1221ab7dede9 100644 --- a/models/context.go +++ b/models/context.go @@ -42,7 +42,7 @@ func WithContext(f func(ctx DBContext) error) error { return f(DBContext{x}) } -// WithTx represents executing database operations on a trasaction +// WithTx represents executing database operations on a transaction func WithTx(f func(ctx DBContext) error) error { sess := x.NewSession() if err := sess.Begin(); err != nil { diff --git a/models/convert.go b/models/convert.go index baa63bb38839..1deb7c66fbbd 100644 --- a/models/convert.go +++ b/models/convert.go @@ -8,10 +8,16 @@ import ( "fmt" "code.gitea.io/gitea/modules/setting" + + "xorm.io/xorm/schemas" ) // ConvertUtf8ToUtf8mb4 converts database and tables from utf8 to utf8mb4 if it's mysql and set ROW_FORMAT=dynamic func ConvertUtf8ToUtf8mb4() error { + if x.Dialect().URI().DBType != schemas.MYSQL { + return nil + } + _, err := x.Exec(fmt.Sprintf("ALTER DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci", setting.Database.Name)) if err != nil { return err diff --git a/models/error.go b/models/error.go index 48cba57a8135..fd8f2771ae25 100644 --- a/models/error.go +++ b/models/error.go @@ -237,6 +237,21 @@ func (err ErrEmailAddressNotExist) Error() string { return fmt.Sprintf("Email address does not exist [email: %s]", err.Email) } +// ErrPrimaryEmailCannotDelete primary email address cannot be deleted +type ErrPrimaryEmailCannotDelete struct { + Email string +} + +// IsErrPrimaryEmailCannotDelete checks if an error is an ErrPrimaryEmailCannotDelete +func IsErrPrimaryEmailCannotDelete(err error) bool { + _, ok := err.(ErrPrimaryEmailCannotDelete) + return ok +} + +func (err ErrPrimaryEmailCannotDelete) Error() string { + return fmt.Sprintf("Primary email address cannot be deleted [email: %s]", err.Email) +} + // ErrOpenIDAlreadyUsed represents a "OpenIDAlreadyUsed" kind of error. type ErrOpenIDAlreadyUsed struct { OpenID string @@ -436,6 +451,7 @@ func (err ErrKeyNameAlreadyUsed) Error() string { // ErrGPGNoEmailFound represents a "ErrGPGNoEmailFound" kind of error. type ErrGPGNoEmailFound struct { FailedEmails []string + ID string } // IsErrGPGNoEmailFound checks if an error is a ErrGPGNoEmailFound. @@ -448,6 +464,22 @@ func (err ErrGPGNoEmailFound) Error() string { return fmt.Sprintf("none of the emails attached to the GPG key could be found: %v", err.FailedEmails) } +// ErrGPGInvalidTokenSignature represents a "ErrGPGInvalidTokenSignature" kind of error. +type ErrGPGInvalidTokenSignature struct { + Wrapped error + ID string +} + +// IsErrGPGInvalidTokenSignature checks if an error is a ErrGPGInvalidTokenSignature. +func IsErrGPGInvalidTokenSignature(err error) bool { + _, ok := err.(ErrGPGInvalidTokenSignature) + return ok +} + +func (err ErrGPGInvalidTokenSignature) Error() string { + return "the provided signature does not sign the token with the provided key" +} + // ErrGPGKeyParsing represents a "ErrGPGKeyParsing" kind of error. type ErrGPGKeyParsing struct { ParseError error @@ -970,6 +1002,21 @@ func (err ErrInvalidTagName) Error() string { return fmt.Sprintf("release tag name is not valid [tag_name: %s]", err.TagName) } +// ErrProtectedTagName represents a "ProtectedTagName" kind of error. +type ErrProtectedTagName struct { + TagName string +} + +// IsErrProtectedTagName checks if an error is a ErrProtectedTagName. 
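+// A typical caller sketch (illustrative, mirroring the tag-protection test in
+// integrations/repo_tag_test.go added by this change):
+//
+//	err := release.CreateNewTag(owner, repo, "master", "v-2", "second tag")
+//	if models.IsErrProtectedTagName(err) {
+//		// the tag name matched a protected pattern the user may not use
+//	}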
+func IsErrProtectedTagName(err error) bool { + _, ok := err.(ErrProtectedTagName) + return ok +} + +func (err ErrProtectedTagName) Error() string { + return fmt.Sprintf("release tag name is protected [tag_name: %s]", err.TagName) +} + // ErrRepoFileAlreadyExists represents a "RepoFileAlreadyExist" kind of error. type ErrRepoFileAlreadyExists struct { Path string @@ -1084,7 +1131,7 @@ func IsErrUserDoesNotHaveAccessToRepo(err error) bool { } func (err ErrUserDoesNotHaveAccessToRepo) Error() string { - return fmt.Sprintf("user doesn't have acces to repo [user_id: %d, repo_name: %s]", err.UserID, err.RepoName) + return fmt.Sprintf("user doesn't have access to repo [user_id: %d, repo_name: %s]", err.UserID, err.RepoName) } // ErrWontSign explains the first reason why a commit would not be signed @@ -1259,7 +1306,7 @@ func IsErrSHAOrCommitIDNotProvided(err error) bool { } func (err ErrSHAOrCommitIDNotProvided) Error() string { - return "a SHA or commmit ID must be proved when updating a file" + return "a SHA or commit ID must be proved when updating a file" } // __ __ ___. .__ __ diff --git a/models/fixtures/action.yml b/models/fixtures/action.yml index 14cfd90423cf..e3f3d2a97126 100644 --- a/models/fixtures/action.yml +++ b/models/fixtures/action.yml @@ -32,3 +32,27 @@ repo_id: 22 is_private: true created_unix: 1603267920 + +- id: 5 + user_id: 10 + op_type: 1 # create repo + act_user_id: 10 + repo_id: 6 + is_private: true + created_unix: 1603010100 + +- id: 6 + user_id: 10 + op_type: 1 # create repo + act_user_id: 10 + repo_id: 7 + is_private: true + created_unix: 1603011300 + +- id: 7 + user_id: 10 + op_type: 1 # create repo + act_user_id: 10 + repo_id: 8 + is_private: false + created_unix: 1603011540 # grouped with id:7 diff --git a/models/fixtures/email_address.yml b/models/fixtures/email_address.yml index c37d9a787778..e7df5fdc5f01 100644 --- a/models/fixtures/email_address.yml +++ b/models/fixtures/email_address.yml @@ -1,35 +1,279 @@ - id: 1 - uid: 1 + uid: 11 email: user11@example.com + lower_email: user11@example.com is_activated: false + is_primary: true - id: 2 - uid: 1 + uid: 12 email: user12@example.com - is_activated: false + lower_email: user12@example.com + is_activated: true + is_primary: true - id: 3 uid: 2 email: user2@example.com + lower_email: user2@example.com is_activated: true + is_primary: true - id: 4 - uid: 2 + uid: 21 email: user21@example.com - is_activated: false + lower_email: user21@example.com + is_activated: true + is_primary: true - id: 5 uid: 9999999 email: user9999999@example.com + lower_email: user9999999@example.com is_activated: true + is_primary: false - id: 6 uid: 10 + email: user10@example.com + lower_email: user10@example.com + is_activated: true + is_primary: true + +- + id: 7 + uid: 10 email: user101@example.com + lower_email: user101@example.com + is_activated: true + is_primary: false + +- + id: 8 + uid: 9 + email: user9@example.com + lower_email: user9@example.com + is_activated: false + is_primary: true + +- + id: 9 + uid: 1 + email: user1@example.com + lower_email: user1@example.com + is_activated: true + is_primary: true + +- + id: 10 + uid: 3 + email: user3@example.com + lower_email: user3@example.com + is_activated: true + is_primary: true + +- + id: 11 + uid: 4 + email: user4@example.com + lower_email: user4@example.com + is_activated: true + is_primary: true + +- + id: 12 + uid: 5 + email: user5@example.com + lower_email: user5@example.com is_activated: true + is_primary: true + +- + id: 13 + uid: 6 + email: user6@example.com + 
lower_email: user6@example.com + is_activated: true + is_primary: true + +- + id: 14 + uid: 7 + email: user7@example.com + lower_email: user7@example.com + is_activated: true + is_primary: true + +- + id: 15 + uid: 8 + email: user8@example.com + lower_email: user8@example.com + is_activated: true + is_primary: true + +- + id: 16 + uid: 13 + email: user13@example.com + lower_email: user13@example.com + is_activated: true + is_primary: true + +- + id: 17 + uid: 14 + email: user14@example.com + lower_email: user14@example.com + is_activated: true + is_primary: true + +- + id: 18 + uid: 15 + email: user15@example.com + lower_email: user15@example.com + is_activated: true + is_primary: true + +- + id: 19 + uid: 16 + email: user16@example.com + lower_email: user16@example.com + is_activated: true + is_primary: true + +- + id: 20 + uid: 17 + email: user17@example.com + lower_email: user17@example.com + is_activated: true + is_primary: true + +- + id: 21 + uid: 18 + email: user18@example.com + lower_email: user18@example.com + is_activated: true + is_primary: true + +- + id: 22 + uid: 19 + email: user19@example.com + lower_email: user19@example.com + is_activated: true + is_primary: true + +- + id: 23 + uid: 20 + email: user20@example.com + lower_email: user20@example.com + is_activated: true + is_primary: true + +- + id: 24 + uid: 22 + email: limited_org@example.com + lower_email: limited_org@example.com + is_activated: true + is_primary: true + +- + id: 25 + uid: 23 + email: privated_org@example.com + lower_email: privated_org@example.com + is_activated: true + is_primary: true + +- + id: 26 + uid: 24 + email: user24@example.com + lower_email: user24@example.com + is_activated: true + is_primary: true + +- + id: 27 + uid: 25 + email: org25@example.com + lower_email: org25@example.com + is_activated: true + is_primary: true + +- + id: 28 + uid: 26 + email: org26@example.com + lower_email: org26@example.com + is_activated: true + is_primary: true + +- + id: 29 + uid: 27 + email: user27@example.com + lower_email: user27@example.com + is_activated: true + is_primary: true + +- + id: 30 + uid: 28 + email: user28@example.com + lower_email: user28@example.com + is_activated: true + is_primary: true + +- + id: 31 + uid: 29 + email: user29@example.com + lower_email: user29@example.com + is_activated: true + is_primary: true + +- + id: 32 + uid: 30 + email: user30@example.com + lower_email: user30@example.com + is_activated: true + is_primary: true + +- + id: 33 + uid: 1 + email: user1-2@example.com + lower_email: user1-2@example.com + is_activated: true + is_primary: false + +- + id: 34 + uid: 1 + email: user1-3@example.com + lower_email: user1-3@example.com + is_activated: true + is_primary: false + +- + id: 35 + uid: 2 + email: user2-2@example.com + lower_email: user2-2@example.com + is_activated: false + is_primary: false \ No newline at end of file diff --git a/models/fixtures/issue.yml b/models/fixtures/issue.yml index 31df00d9e699..946899d6ff03 100644 --- a/models/fixtures/issue.yml +++ b/models/fixtures/issue.yml @@ -152,7 +152,7 @@ - id: 13 repo_id: 50 - index: 0 + index: 1 poster_id: 2 name: issue in active repo content: we'll be testing github issue 13171 with this. @@ -164,7 +164,7 @@ - id: 14 repo_id: 51 - index: 0 + index: 1 poster_id: 2 name: issue in archived repo content: we'll be testing github issue 13171 with this. 
diff --git a/models/fixtures/issue_index.yml b/models/fixtures/issue_index.yml new file mode 100644 index 000000000000..49d95c57ab77 --- /dev/null +++ b/models/fixtures/issue_index.yml @@ -0,0 +1,24 @@ +- + group_id: 1 + max_index: 5 +- + group_id: 2 + max_index: 2 +- + group_id: 3 + max_index: 2 +- + group_id: 10 + max_index: 1 +- + group_id: 48 + max_index: 1 +- + group_id: 42 + max_index: 1 +- + group_id: 50 + max_index: 1 +- + group_id: 51 + max_index: 1 \ No newline at end of file diff --git a/models/fixtures/issue_user.yml b/models/fixtures/issue_user.yml index 8039b1e40ff1..64824316ea27 100644 --- a/models/fixtures/issue_user.yml +++ b/models/fixtures/issue_user.yml @@ -17,4 +17,4 @@ uid: 4 issue_id: 1 is_read: false - is_mentioned: false + is_mentioned: true diff --git a/models/fixtures/release.yml b/models/fixtures/release.yml index 8d3f5840efef..1703f959d268 100644 --- a/models/fixtures/release.yml +++ b/models/fixtures/release.yml @@ -1,5 +1,4 @@ -- - id: 1 +- id: 1 repo_id: 1 publisher_id: 2 tag_name: "v1.1" @@ -13,8 +12,7 @@ is_tag: false created_unix: 946684800 -- - id: 2 +- id: 2 repo_id: 40 publisher_id: 2 tag_name: "v1.1" @@ -28,8 +26,7 @@ is_tag: false created_unix: 946684800 -- - id: 3 +- id: 3 repo_id: 1 publisher_id: 2 tag_name: "delete-tag" @@ -43,3 +40,29 @@ is_tag: true created_unix: 946684800 +- id: 4 + repo_id: 1 + publisher_id: 2 + tag_name: "draft-release" + lower_tag_name: "draft-release" + target: "master" + title: "draft-release" + is_draft: true + is_prerelease: false + is_tag: false + created_unix: 1619524806 + +- id: 5 + repo_id: 1 + publisher_id: 2 + tag_name: "v1.0" + lower_tag_name: "v1.0" + target: "master" + title: "pre-release" + note: "some text for a pre release" + sha1: "65f1bf27bc3bf70f64657658635e66094edbcb4d" + num_commits: 1 + is_draft: false + is_prerelease: true + is_tag: false + created_unix: 946684800 diff --git a/models/fixtures/repo_archiver.yml b/models/fixtures/repo_archiver.yml new file mode 100644 index 000000000000..ca780a73aa0c --- /dev/null +++ b/models/fixtures/repo_archiver.yml @@ -0,0 +1 @@ +[] # empty diff --git a/models/fixtures/user.yml b/models/fixtures/user.yml index d903a7942f81..850ee4041d81 100644 --- a/models/fixtures/user.yml +++ b/models/fixtures/user.yml @@ -508,7 +508,6 @@ num_repos: 0 is_active: true - - id: 30 lower_name: user30 @@ -525,3 +524,20 @@ avatar_email: user30@example.com num_repos: 2 is_active: true + +- + id: 31 + lower_name: user31 + name: user31 + full_name: "user31" + email: user31@example.com + passwd_hash_algo: argon2 + passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b # password + type: 0 # individual + salt: ZogKvWdyEx + is_admin: false + visibility: 2 + avatar: avatar31 + avatar_email: user31@example.com + num_repos: 0 + is_active: true diff --git a/models/gpg_key.go b/models/gpg_key.go index 2ffcf47ca7d4..74ffb82a545b 100644 --- a/models/gpg_key.go +++ b/models/gpg_key.go @@ -5,27 +5,25 @@ package models import ( - "bytes" - "container/list" - "crypto" - "encoding/base64" "fmt" - "hash" - "io" "strings" "time" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "github.com/keybase/go-crypto/openpgp" - "github.com/keybase/go-crypto/openpgp/armor" "github.com/keybase/go-crypto/openpgp/packet" "xorm.io/xorm" ) +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. 
+// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ + // GPGKey represents a GPG key. type GPGKey struct { ID int64 `xorm:"pk autoincr"` @@ -38,18 +36,13 @@ type GPGKey struct { AddedUnix timeutil.TimeStamp SubsKey []*GPGKey `xorm:"-"` Emails []*EmailAddress + Verified bool `xorm:"NOT NULL DEFAULT false"` CanSign bool CanEncryptComms bool CanEncryptStorage bool CanCertify bool } -// GPGKeyImport the original import of key -type GPGKeyImport struct { - KeyID string `xorm:"pk CHAR(16) NOT NULL"` - Content string `xorm:"TEXT NOT NULL"` -} - // BeforeInsert will be invoked by XORM before inserting a record func (key *GPGKey) BeforeInsert() { key.AddedUnix = timeutil.TimeStampNow() @@ -96,131 +89,6 @@ func GetGPGKeysByKeyID(keyID string) ([]*GPGKey, error) { return keys, x.Where("key_id=?", keyID).Find(&keys) } -// GetGPGImportByKeyID returns the import public armored key by given KeyID. -func GetGPGImportByKeyID(keyID string) (*GPGKeyImport, error) { - key := new(GPGKeyImport) - has, err := x.ID(keyID).Get(key) - if err != nil { - return nil, err - } else if !has { - return nil, ErrGPGKeyImportNotExist{keyID} - } - return key, nil -} - -// checkArmoredGPGKeyString checks if the given key string is a valid GPG armored key. -// The function returns the actual public key on success -func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) { - list, err := openpgp.ReadArmoredKeyRing(strings.NewReader(content)) - if err != nil { - return nil, ErrGPGKeyParsing{err} - } - return list, nil -} - -// addGPGKey add key, import and subkeys to database -func addGPGKey(e Engine, key *GPGKey, content string) (err error) { - // Add GPGKeyImport - if _, err = e.Insert(GPGKeyImport{ - KeyID: key.KeyID, - Content: content, - }); err != nil { - return err - } - // Save GPG primary key. - if _, err = e.Insert(key); err != nil { - return err - } - // Save GPG subs key. - for _, subkey := range key.SubsKey { - if err := addGPGSubKey(e, subkey); err != nil { - return err - } - } - return nil -} - -// addGPGSubKey add subkeys to database -func addGPGSubKey(e Engine, key *GPGKey) (err error) { - // Save GPG primary key. - if _, err = e.Insert(key); err != nil { - return err - } - // Save GPG subs key. - for _, subkey := range key.SubsKey { - if err := addGPGSubKey(e, subkey); err != nil { - return err - } - } - return nil -} - -// AddGPGKey adds new public key to database. -func AddGPGKey(ownerID int64, content string) ([]*GPGKey, error) { - ekeys, err := checkArmoredGPGKeyString(content) - if err != nil { - return nil, err - } - sess := x.NewSession() - defer sess.Close() - if err = sess.Begin(); err != nil { - return nil, err - } - keys := make([]*GPGKey, 0, len(ekeys)) - for _, ekey := range ekeys { - // Key ID cannot be duplicated. - has, err := sess.Where("key_id=?", ekey.PrimaryKey.KeyIdString()). 
- Get(new(GPGKey)) - if err != nil { - return nil, err - } else if has { - return nil, ErrGPGKeyIDAlreadyUsed{ekey.PrimaryKey.KeyIdString()} - } - - // Get DB session - - key, err := parseGPGKey(ownerID, ekey) - if err != nil { - return nil, err - } - - if err = addGPGKey(sess, key, content); err != nil { - return nil, err - } - keys = append(keys, key) - } - return keys, sess.Commit() -} - -// base64EncPubKey encode public key content to base 64 -func base64EncPubKey(pubkey *packet.PublicKey) (string, error) { - var w bytes.Buffer - err := pubkey.Serialize(&w) - if err != nil { - return "", err - } - return base64.StdEncoding.EncodeToString(w.Bytes()), nil -} - -// base64DecPubKey decode public key content from base 64 -func base64DecPubKey(content string) (*packet.PublicKey, error) { - b, err := readerFromBase64(content) - if err != nil { - return nil, err - } - // Read key - p, err := packet.Read(b) - if err != nil { - return nil, err - } - // Check type - pkey, ok := p.(*packet.PublicKey) - if !ok { - return nil, fmt.Errorf("key is not a public key") - } - return pkey, nil -} - // GPGKeyToEntity retrieve the imported key and the traducted entity func GPGKeyToEntity(k *GPGKey) (*openpgp.Entity, error) { impKey, err := GetGPGImportByKeyID(k.KeyID) @@ -254,27 +122,8 @@ func parseSubGPGKey(ownerID int64, primaryID string, pubkey *packet.PublicKey, e }, nil } -// getExpiryTime extract the expire time of primary key based on sig -func getExpiryTime(e *openpgp.Entity) time.Time { - expiry := time.Time{} - // Extract self-sign for expire date based on : https://github.com/golang/crypto/blob/master/openpgp/keys.go#L165 - var selfSig *packet.Signature - for _, ident := range e.Identities { - if selfSig == nil { - selfSig = ident.SelfSignature - } else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId { - selfSig = ident.SelfSignature - break - } - } - if selfSig.KeyLifetimeSecs != nil { - expiry = e.PrimaryKey.CreationTime.Add(time.Duration(*selfSig.KeyLifetimeSecs) * time.Second) - } - return expiry -} - // parseGPGKey parse a PrimaryKey entity (primary key + subs keys + self-signature) -func parseGPGKey(ownerID int64, e *openpgp.Entity) (*GPGKey, error) { +func parseGPGKey(ownerID int64, e *openpgp.Entity, verified bool) (*GPGKey, error) { pubkey := e.PrimaryKey expiry := getExpiryTime(e) @@ -301,20 +150,22 @@ func parseGPGKey(ownerID int64, e *openpgp.Entity) (*GPGKey, error) { } email := strings.ToLower(strings.TrimSpace(ident.UserId.Email)) for _, e := range userEmails { - if e.Email == email { + if e.IsActivated && e.LowerEmail == email { emails = append(emails, e) break } } } - // In the case no email as been found - if len(emails) == 0 { - failedEmails := make([]string, 0, len(e.Identities)) - for _, ident := range e.Identities { - failedEmails = append(failedEmails, ident.UserId.Email) + if !verified { + // In the case no email as been found + if len(emails) == 0 { + failedEmails := make([]string, 0, len(e.Identities)) + for _, ident := range e.Identities { + failedEmails = append(failedEmails, ident.UserId.Email) + } + return nil, ErrGPGNoEmailFound{failedEmails, e.PrimaryKey.KeyIdString()} } - return nil, ErrGPGNoEmailFound{failedEmails} } content, err := base64EncPubKey(pubkey) @@ -330,6 +181,7 @@ func parseGPGKey(ownerID int64, e *openpgp.Entity) (*GPGKey, error) { ExpiredUnix: timeutil.TimeStamp(expiry.Unix()), Emails: emails, SubsKey: subkeys, + Verified: verified, CanSign: pubkey.CanSign(), CanEncryptComms: pubkey.PubKeyAlgo.CanEncrypt(), 
CanEncryptStorage: pubkey.PubKeyAlgo.CanEncrypt(), @@ -378,545 +230,32 @@ func DeleteGPGKey(doer *User, id int64) (err error) { return sess.Commit() } -// CommitVerification represents a commit validation of signature -type CommitVerification struct { - Verified bool - Warning bool - Reason string - SigningUser *User - CommittingUser *User - SigningEmail string - SigningKey *GPGKey - TrustStatus string -} - -// SignCommit represents a commit with validation of signature. -type SignCommit struct { - Verification *CommitVerification - *UserCommit -} - -const ( - // BadSignature is used as the reason when the signature has a KeyID that is in the db - // but no key that has that ID verifies the signature. This is a suspicious failure. - BadSignature = "gpg.error.probable_bad_signature" - // BadDefaultSignature is used as the reason when the signature has a KeyID that matches the - // default Key but is not verified by the default key. This is a suspicious failure. - BadDefaultSignature = "gpg.error.probable_bad_default_signature" - // NoKeyFound is used as the reason when no key can be found to verify the signature. - NoKeyFound = "gpg.error.no_gpg_keys_found" -) - -func readerFromBase64(s string) (io.Reader, error) { - bs, err := base64.StdEncoding.DecodeString(s) - if err != nil { - return nil, err - } - return bytes.NewBuffer(bs), nil -} - -func populateHash(hashFunc crypto.Hash, msg []byte) (hash.Hash, error) { - h := hashFunc.New() - if _, err := h.Write(msg); err != nil { - return nil, err - } - return h, nil -} - -// readArmoredSign read an armored signature block with the given type. https://sourcegraph.com/github.com/golang/crypto/-/blob/openpgp/read.go#L24:6-24:17 -func readArmoredSign(r io.Reader) (body io.Reader, err error) { - block, err := armor.Decode(r) - if err != nil { - return - } - if block.Type != openpgp.SignatureType { - return nil, fmt.Errorf("expected '" + openpgp.SignatureType + "', got: " + block.Type) - } - return block.Body, nil -} - -func extractSignature(s string) (*packet.Signature, error) { - r, err := readArmoredSign(strings.NewReader(s)) - if err != nil { - return nil, fmt.Errorf("Failed to read signature armor") - } - p, err := packet.Read(r) - if err != nil { - return nil, fmt.Errorf("Failed to read signature packet") - } - sig, ok := p.(*packet.Signature) - if !ok { - return nil, fmt.Errorf("Packet is not a signature") - } - return sig, nil -} - -func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error { - // Check if key can sign - if !k.CanSign { - return fmt.Errorf("key can not sign") - } - // Decode key - pkey, err := base64DecPubKey(k.Content) - if err != nil { - return err - } - return pkey.VerifySignature(h, s) -} - -func hashAndVerify(sig *packet.Signature, payload string, k *GPGKey, committer, signer *User, email string) *CommitVerification { - // Generating hash of commit - hash, err := populateHash(sig.Hash, []byte(payload)) - if err != nil { // Skipping failed to generate hash - log.Error("PopulateHash: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - - if err := verifySign(sig, hash, k); err == nil { - return &CommitVerification{ // Everything is ok - CommittingUser: committer, - Verified: true, - Reason: fmt.Sprintf("%s / %s", signer.Name, k.KeyID), - SigningUser: signer, - SigningKey: k, - SigningEmail: email, - } - } - return nil -} - -func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey, committer, signer *User, email 
string) *CommitVerification { - commitVerification := hashAndVerify(sig, payload, k, committer, signer, email) - if commitVerification != nil { - return commitVerification - } - - // And test also SubsKey - for _, sk := range k.SubsKey { - commitVerification := hashAndVerify(sig, payload, sk, committer, signer, email) - if commitVerification != nil { - return commitVerification - } - } - return nil -} - -func hashAndVerifyForKeyID(sig *packet.Signature, payload string, committer *User, keyID, name, email string) *CommitVerification { - if keyID == "" { - return nil - } - keys, err := GetGPGKeysByKeyID(keyID) - if err != nil { - log.Error("GetGPGKeysByKeyID: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - if len(keys) == 0 { - return nil - } +func checkKeyEmails(email string, keys ...*GPGKey) (bool, string) { + uid := int64(0) + var userEmails []*EmailAddress + var user *User for _, key := range keys { - var primaryKeys []*GPGKey - if key.PrimaryKeyID != "" { - primaryKeys, err = GetGPGKeysByKeyID(key.PrimaryKeyID) - if err != nil { - log.Error("GetGPGKeysByKeyID: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - } - activated := false - if len(email) != 0 { - for _, e := range key.Emails { - if e.IsActivated && strings.EqualFold(e.Email, email) { - activated = true - email = e.Email - break - } - } - if !activated { - for _, pkey := range primaryKeys { - for _, e := range pkey.Emails { - if e.IsActivated && strings.EqualFold(e.Email, email) { - activated = true - email = e.Email - break - } - } - if activated { - break - } - } - } - } else { - for _, e := range key.Emails { - if e.IsActivated { - activated = true - email = e.Email - break - } - } - if !activated { - for _, pkey := range primaryKeys { - for _, e := range pkey.Emails { - if e.IsActivated { - activated = true - email = e.Email - break - } - } - if activated { - break - } - } - } - } - - if !activated { - continue - } - signer := &User{ - Name: name, - Email: email, - } - if key.OwnerID != 0 { - owner, err := GetUserByID(key.OwnerID) - if err == nil { - signer = owner - } else if !IsErrUserNotExist(err) { - log.Error("Failed to GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.no_committer_account", - } - } - } - commitVerification := hashAndVerifyWithSubKeys(sig, payload, key, committer, signer, email) - if commitVerification != nil { - return commitVerification - } - } - // This is a bad situation ... We have a key id that is in our database but the signature doesn't match. - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Warning: true, - Reason: BadSignature, - } -} - -// ParseCommitWithSignature check if signature is good against keystore. -func ParseCommitWithSignature(c *git.Commit) *CommitVerification { - var committer *User - if c.Committer != nil { - var err error - // Find Committer account - committer, err = GetUserByEmail(c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not - if err != nil { // Skipping not user for commiter - committer = &User{ - Name: c.Committer.Name, - Email: c.Committer.Email, - } - // We can expect this to often be an ErrUserNotExist. 
in the case - // it is not, however, it is important to log it. - if !IsErrUserNotExist(err) { - log.Error("GetUserByEmail: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.no_committer_account", - } + for _, e := range key.Emails { + if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) { + return true, e.Email } - - } - } - - // If no signature just report the committer - if c.Signature == nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, // Default value - Reason: "gpg.error.not_signed_commit", // Default value - } - } - - // Parsing signature - sig, err := extractSignature(c.Signature.Signature) - if err != nil { // Skipping failed to extract sign - log.Error("SignatureRead err: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.extract_sign", } - } - - keyID := "" - if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 { - keyID = fmt.Sprintf("%X", *sig.IssuerKeyId) - } - if keyID == "" && sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 { - keyID = fmt.Sprintf("%X", sig.IssuerFingerprint[12:20]) - } - defaultReason := NoKeyFound - - // First check if the sig has a keyID and if so just look at that - if commitVerification := hashAndVerifyForKeyID( - sig, - c.Signature.Payload, - committer, - keyID, - setting.AppName, - ""); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - - // Now try to associate the signature with the committer, if present - if committer.ID != 0 { - keys, err := ListGPGKeys(committer.ID, ListOptions{}) - if err != nil { // Skipping failed to get gpg keys of user - log.Error("ListGPGKeys: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", + if key.Verified && key.OwnerID != 0 { + if uid != key.OwnerID { + userEmails, _ = GetEmailAddresses(key.OwnerID) + uid = key.OwnerID + user = &User{ID: uid} + _, _ = GetUser(user) } - } - - for _, k := range keys { - // Pre-check (& optimization) that emails attached to key can be attached to the commiter email and can validate - canValidate := false - email := "" - for _, e := range k.Emails { - if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { - canValidate = true - email = e.Email - break + for _, e := range userEmails { + if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) { + return true, e.Email } } - if !canValidate { - continue // Skip this key - } - - commitVerification := hashAndVerifyWithSubKeys(sig, c.Signature.Payload, k, committer, committer, email) - if commitVerification != nil { - return commitVerification + if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) { + return true, user.GetEmail() } } } - - if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" { - // OK we should try the default key - gpgSettings := git.GPGSettings{ - Sign: true, - KeyID: setting.Repository.Signing.SigningKey, - Name: setting.Repository.Signing.SigningName, - Email: setting.Repository.Signing.SigningEmail, - } - if err := gpgSettings.LoadPublicKeyContent(); err != nil { - log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err) - } else if commitVerification := verifyWithGPGSettings(&gpgSettings, 
sig, c.Signature.Payload, committer, keyID); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - } - - defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false) - if err != nil { - log.Error("Error getting default public gpg key: %v", err) - } else if defaultGPGSettings == nil { - log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String()) - } else if defaultGPGSettings.Sign { - if commitVerification := verifyWithGPGSettings(defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - } - - return &CommitVerification{ // Default at this stage - CommittingUser: committer, - Verified: false, - Warning: defaultReason != NoKeyFound, - Reason: defaultReason, - SigningKey: &GPGKey{ - KeyID: keyID, - }, - } -} - -func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *User, keyID string) *CommitVerification { - // First try to find the key in the db - if commitVerification := hashAndVerifyForKeyID(sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil { - return commitVerification - } - - // Otherwise we have to parse the key - ekeys, err := checkArmoredGPGKeyString(gpgSettings.PublicKeyContent) - if err != nil { - log.Error("Unable to get default signing key: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - for _, ekey := range ekeys { - pubkey := ekey.PrimaryKey - content, err := base64EncPubKey(pubkey) - if err != nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - k := &GPGKey{ - Content: content, - CanSign: pubkey.CanSign(), - KeyID: pubkey.KeyIdString(), - } - for _, subKey := range ekey.Subkeys { - content, err := base64EncPubKey(subKey.PublicKey) - if err != nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - k.SubsKey = append(k.SubsKey, &GPGKey{ - Content: content, - CanSign: subKey.PublicKey.CanSign(), - KeyID: subKey.PublicKey.KeyIdString(), - }) - } - if commitVerification := hashAndVerifyWithSubKeys(sig, payload, k, committer, &User{ - Name: gpgSettings.Name, - Email: gpgSettings.Email, - }, gpgSettings.Email); commitVerification != nil { - return commitVerification - } - if keyID == k.KeyID { - // This is a bad situation ... We have a key id that matches our default key but the signature doesn't match. - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Warning: true, - Reason: BadSignature, - } - } - } - return nil -} - -// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys. 
-func ParseCommitsWithSignature(oldCommits *list.List, repository *Repository) *list.List { - var ( - newCommits = list.New() - e = oldCommits.Front() - ) - keyMap := map[string]bool{} - - for e != nil { - c := e.Value.(UserCommit) - signCommit := SignCommit{ - UserCommit: &c, - Verification: ParseCommitWithSignature(c.Commit), - } - - _ = CalculateTrustStatus(signCommit.Verification, repository, &keyMap) - - newCommits.PushBack(signCommit) - e = e.Next() - } - return newCommits -} - -// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository -func CalculateTrustStatus(verification *CommitVerification, repository *Repository, keyMap *map[string]bool) (err error) { - if !verification.Verified { - return - } - - // There are several trust models in Gitea - trustModel := repository.GetTrustModel() - - // In the Committer trust model a signature is trusted if it matches the committer - // - it doesn't matter if they're a collaborator, the owner, Gitea or Github - // NB: This model is commit verification only - if trustModel == CommitterTrustModel { - // default to "unmatched" - verification.TrustStatus = "unmatched" - - // We can only verify against users in our database but the default key will match - // against by email if it is not in the db. - if (verification.SigningUser.ID != 0 && - verification.CommittingUser.ID == verification.SigningUser.ID) || - (verification.SigningUser.ID == 0 && verification.CommittingUser.ID == 0 && - verification.SigningUser.Email == verification.CommittingUser.Email) { - verification.TrustStatus = "trusted" - } - return - } - - // Now we drop to the more nuanced trust models... - verification.TrustStatus = "trusted" - - if verification.SigningUser.ID == 0 { - // This commit is signed by the default key - but this key is not assigned to a user in the DB. - - // However in the CollaboratorCommitterTrustModel we cannot mark this as trusted - // unless the default key matches the email of a non-user. - if trustModel == CollaboratorCommitterTrustModel && (verification.CommittingUser.ID != 0 || - verification.SigningUser.Email != verification.CommittingUser.Email) { - verification.TrustStatus = "untrusted" - } - return - } - - var isMember bool - if keyMap != nil { - var has bool - isMember, has = (*keyMap)[verification.SigningKey.KeyID] - if !has { - isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID) - (*keyMap)[verification.SigningKey.KeyID] = isMember - } - } else { - isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID) - } - - if !isMember { - verification.TrustStatus = "untrusted" - if verification.CommittingUser.ID != verification.SigningUser.ID { - // The committing user and the signing user are not the same - // This should be marked as questionable unless the signing user is a collaborator/team member etc. - verification.TrustStatus = "unmatched" - } - } else if trustModel == CollaboratorCommitterTrustModel && verification.CommittingUser.ID != verification.SigningUser.ID { - // The committing user and the signing user are not the same and our trustmodel states that they must match - verification.TrustStatus = "unmatched" - } - - return + return false, email } diff --git a/models/gpg_key_add.go b/models/gpg_key_add.go new file mode 100644 index 000000000000..1e589e7fee52 --- /dev/null +++ b/models/gpg_key_add.go @@ -0,0 +1,125 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "strings" + + "code.gitea.io/gitea/modules/log" + + "github.com/keybase/go-crypto/openpgp" +) + +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. +// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ +// _____ .___ .___ +// / _ \ __| _/__| _/ +// / /_\ \ / __ |/ __ | +// / | \/ /_/ / /_/ | +// \____|__ /\____ \____ | +// \/ \/ \/ + +// This file contains functions relating to adding GPG Keys + +// addGPGKey add key, import and subkeys to database +func addGPGKey(e Engine, key *GPGKey, content string) (err error) { + // Add GPGKeyImport + if _, err = e.Insert(GPGKeyImport{ + KeyID: key.KeyID, + Content: content, + }); err != nil { + return err + } + // Save GPG primary key. + if _, err = e.Insert(key); err != nil { + return err + } + // Save GPG subs key. + for _, subkey := range key.SubsKey { + if err := addGPGSubKey(e, subkey); err != nil { + return err + } + } + return nil +} + +// addGPGSubKey add subkeys to database +func addGPGSubKey(e Engine, key *GPGKey) (err error) { + // Save GPG primary key. + if _, err = e.Insert(key); err != nil { + return err + } + // Save GPG subs key. + for _, subkey := range key.SubsKey { + if err := addGPGSubKey(e, subkey); err != nil { + return err + } + } + return nil +} + +// AddGPGKey adds new public key to database. +func AddGPGKey(ownerID int64, content, token, signature string) ([]*GPGKey, error) { + ekeys, err := checkArmoredGPGKeyString(content) + if err != nil { + return nil, err + } + + sess := x.NewSession() + defer sess.Close() + if err = sess.Begin(); err != nil { + return nil, err + } + keys := make([]*GPGKey, 0, len(ekeys)) + + verified := false + // Handle provided signature + if signature != "" { + signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature)) + if err != nil { + signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature)) + } + if err != nil { + signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature)) + } + if err != nil { + log.Error("Unable to validate token signature. Error: %v", err) + return nil, ErrGPGInvalidTokenSignature{ + ID: ekeys[0].PrimaryKey.KeyIdString(), + Wrapped: err, + } + } + ekeys = []*openpgp.Entity{signer} + verified = true + } + + for _, ekey := range ekeys { + // Key ID cannot be duplicated. + has, err := sess.Where("key_id=?", ekey.PrimaryKey.KeyIdString()). + Get(new(GPGKey)) + if err != nil { + return nil, err + } else if has { + return nil, ErrGPGKeyIDAlreadyUsed{ekey.PrimaryKey.KeyIdString()} + } + + // Get DB session + + key, err := parseGPGKey(ownerID, ekey, verified) + if err != nil { + return nil, err + } + + if err = addGPGKey(sess, key, content); err != nil { + return nil, err + } + keys = append(keys, key) + } + return keys, sess.Commit() +} diff --git a/models/gpg_key_commit_verification.go b/models/gpg_key_commit_verification.go new file mode 100644 index 000000000000..f0c27f13aa80 --- /dev/null +++ b/models/gpg_key_commit_verification.go @@ -0,0 +1,520 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
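AddGPGKey now takes a verification token and an optional armored detached signature alongside the key itself: when a signature is supplied, the key is imported only if the signature over the token verifies against one of the uploaded entities, and the check is retried with a trailing "\n" and "\r\n" because most GPG front-ends append a newline to the signed text. On success the key is stored with Verified set, which is what later lets the email checks trust the committer address directly. A minimal sketch of that retry, using the same keybase openpgp fork imported above (verifyTokenSignature is an illustrative helper name, not part of the change):

import (
	"fmt"
	"strings"

	"github.com/keybase/go-crypto/openpgp"
)

// verifyTokenSignature mirrors the tolerance built into AddGPGKey: try the
// token exactly as issued, then with "\n" and "\r\n" appended, and accept the
// first variant whose detached signature verifies against the uploaded keyring.
func verifyTokenSignature(keyring openpgp.EntityList, token, armoredSig string) (*openpgp.Entity, error) {
	var lastErr error
	for _, signed := range []string{token, token + "\n", token + "\r\n"} {
		signer, err := openpgp.CheckArmoredDetachedSignature(
			keyring, strings.NewReader(signed), strings.NewReader(armoredSig))
		if err == nil {
			return signer, nil
		}
		lastErr = err
	}
	return nil, fmt.Errorf("token signature did not verify: %v", lastErr)
}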
+ +package models + +import ( + "container/list" + "fmt" + "hash" + "strings" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + + "github.com/keybase/go-crypto/openpgp/packet" +) + +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. +// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ +// _________ .__ __ +// \_ ___ \ ____ _____ _____ |__|/ |_ +// / \ \/ / _ \ / \ / \| \ __\ +// \ \___( <_> ) Y Y \ Y Y \ || | +// \______ /\____/|__|_| /__|_| /__||__| +// \/ \/ \/ +// ____ ____ .__ _____.__ __ .__ +// \ \ / /___________|__|/ ____\__| ____ _____ _/ |_|__| ____ ____ +// \ Y // __ \_ __ \ \ __\| |/ ___\\__ \\ __\ |/ _ \ / \ +// \ /\ ___/| | \/ || | | \ \___ / __ \| | | ( <_> ) | \ +// \___/ \___ >__| |__||__| |__|\___ >____ /__| |__|\____/|___| / +// \/ \/ \/ \/ + +// This file provides functions relating commit verification + +// CommitVerification represents a commit validation of signature +type CommitVerification struct { + Verified bool + Warning bool + Reason string + SigningUser *User + CommittingUser *User + SigningEmail string + SigningKey *GPGKey + TrustStatus string +} + +// SignCommit represents a commit with validation of signature. +type SignCommit struct { + Verification *CommitVerification + *UserCommit +} + +const ( + // BadSignature is used as the reason when the signature has a KeyID that is in the db + // but no key that has that ID verifies the signature. This is a suspicious failure. + BadSignature = "gpg.error.probable_bad_signature" + // BadDefaultSignature is used as the reason when the signature has a KeyID that matches the + // default Key but is not verified by the default key. This is a suspicious failure. + BadDefaultSignature = "gpg.error.probable_bad_default_signature" + // NoKeyFound is used as the reason when no key can be found to verify the signature. + NoKeyFound = "gpg.error.no_gpg_keys_found" +) + +// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys. +func ParseCommitsWithSignature(oldCommits *list.List, repository *Repository) *list.List { + var ( + newCommits = list.New() + e = oldCommits.Front() + ) + keyMap := map[string]bool{} + + for e != nil { + c := e.Value.(UserCommit) + signCommit := SignCommit{ + UserCommit: &c, + Verification: ParseCommitWithSignature(c.Commit), + } + + _ = CalculateTrustStatus(signCommit.Verification, repository, &keyMap) + + newCommits.PushBack(signCommit) + e = e.Next() + } + return newCommits +} + +// ParseCommitWithSignature check if signature is good against keystore. +func ParseCommitWithSignature(c *git.Commit) *CommitVerification { + var committer *User + if c.Committer != nil { + var err error + // Find Committer account + committer, err = GetUserByEmail(c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not + if err != nil { // Skipping not user for committer + committer = &User{ + Name: c.Committer.Name, + Email: c.Committer.Email, + } + // We can expect this to often be an ErrUserNotExist. in the case + // it is not, however, it is important to log it. 
+ if !IsErrUserNotExist(err) { + log.Error("GetUserByEmail: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.no_committer_account", + } + } + + } + } + + // If no signature just report the committer + if c.Signature == nil { + return &CommitVerification{ + CommittingUser: committer, + Verified: false, // Default value + Reason: "gpg.error.not_signed_commit", // Default value + } + } + + // Parsing signature + sig, err := extractSignature(c.Signature.Signature) + if err != nil { // Skipping failed to extract sign + log.Error("SignatureRead err: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.extract_sign", + } + } + + keyID := "" + if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 { + keyID = fmt.Sprintf("%X", *sig.IssuerKeyId) + } + if keyID == "" && sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 { + keyID = fmt.Sprintf("%X", sig.IssuerFingerprint[12:20]) + } + defaultReason := NoKeyFound + + // First check if the sig has a keyID and if so just look at that + if commitVerification := hashAndVerifyForKeyID( + sig, + c.Signature.Payload, + committer, + keyID, + setting.AppName, + ""); commitVerification != nil { + if commitVerification.Reason == BadSignature { + defaultReason = BadSignature + } else { + return commitVerification + } + } + + // Now try to associate the signature with the committer, if present + if committer.ID != 0 { + keys, err := ListGPGKeys(committer.ID, ListOptions{}) + if err != nil { // Skipping failed to get gpg keys of user + log.Error("ListGPGKeys: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.failed_retrieval_gpg_keys", + } + } + + committerEmailAddresses, _ := GetEmailAddresses(committer.ID) + activated := false + for _, e := range committerEmailAddresses { + if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { + activated = true + break + } + } + + for _, k := range keys { + // Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate + canValidate := false + email := "" + if k.Verified && activated { + canValidate = true + email = c.Committer.Email + } + if !canValidate { + for _, e := range k.Emails { + if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { + canValidate = true + email = e.Email + break + } + } + } + if !canValidate { + continue // Skip this key + } + + commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email) + if commitVerification != nil { + return commitVerification + } + } + } + + if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" { + // OK we should try the default key + gpgSettings := git.GPGSettings{ + Sign: true, + KeyID: setting.Repository.Signing.SigningKey, + Name: setting.Repository.Signing.SigningName, + Email: setting.Repository.Signing.SigningEmail, + } + if err := gpgSettings.LoadPublicKeyContent(); err != nil { + log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err) + } else if commitVerification := verifyWithGPGSettings(&gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { + if commitVerification.Reason == BadSignature { + defaultReason = BadSignature + } else { + return commitVerification + } + } + } + + defaultGPGSettings, err 
:= c.GetRepositoryDefaultPublicGPGKey(false) + if err != nil { + log.Error("Error getting default public gpg key: %v", err) + } else if defaultGPGSettings == nil { + log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String()) + } else if defaultGPGSettings.Sign { + if commitVerification := verifyWithGPGSettings(defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { + if commitVerification.Reason == BadSignature { + defaultReason = BadSignature + } else { + return commitVerification + } + } + } + + return &CommitVerification{ // Default at this stage + CommittingUser: committer, + Verified: false, + Warning: defaultReason != NoKeyFound, + Reason: defaultReason, + SigningKey: &GPGKey{ + KeyID: keyID, + }, + } +} + +func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *User, keyID string) *CommitVerification { + // First try to find the key in the db + if commitVerification := hashAndVerifyForKeyID(sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil { + return commitVerification + } + + // Otherwise we have to parse the key + ekeys, err := checkArmoredGPGKeyString(gpgSettings.PublicKeyContent) + if err != nil { + log.Error("Unable to get default signing key: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.generate_hash", + } + } + for _, ekey := range ekeys { + pubkey := ekey.PrimaryKey + content, err := base64EncPubKey(pubkey) + if err != nil { + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.generate_hash", + } + } + k := &GPGKey{ + Content: content, + CanSign: pubkey.CanSign(), + KeyID: pubkey.KeyIdString(), + } + for _, subKey := range ekey.Subkeys { + content, err := base64EncPubKey(subKey.PublicKey) + if err != nil { + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.generate_hash", + } + } + k.SubsKey = append(k.SubsKey, &GPGKey{ + Content: content, + CanSign: subKey.PublicKey.CanSign(), + KeyID: subKey.PublicKey.KeyIdString(), + }) + } + if commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &User{ + Name: gpgSettings.Name, + Email: gpgSettings.Email, + }, gpgSettings.Email); commitVerification != nil { + return commitVerification + } + if keyID == k.KeyID { + // This is a bad situation ... We have a key id that matches our default key but the signature doesn't match. 
+ return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Warning: true, + Reason: BadSignature, + } + } + } + return nil +} + +func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error { + // Check if key can sign + if !k.CanSign { + return fmt.Errorf("key can not sign") + } + // Decode key + pkey, err := base64DecPubKey(k.Content) + if err != nil { + return err + } + return pkey.VerifySignature(h, s) +} + +func hashAndVerify(sig *packet.Signature, payload string, k *GPGKey) (*GPGKey, error) { + // Generating hash of commit + hash, err := populateHash(sig.Hash, []byte(payload)) + if err != nil { // Skipping as failed to generate hash + log.Error("PopulateHash: %v", err) + return nil, err + } + // We will ignore errors in verification as they don't need to be propagated up + err = verifySign(sig, hash, k) + if err != nil { + return nil, nil + } + return k, nil +} + +func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey) (*GPGKey, error) { + verified, err := hashAndVerify(sig, payload, k) + if err != nil || verified != nil { + return verified, err + } + for _, sk := range k.SubsKey { + verified, err := hashAndVerify(sig, payload, sk) + if err != nil || verified != nil { + return verified, err + } + } + return nil, nil +} + +func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *User, email string) *CommitVerification { + key, err := hashAndVerifyWithSubKeys(sig, payload, k) + if err != nil { // Skipping failed to generate hash + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.generate_hash", + } + } + + if key != nil { + return &CommitVerification{ // Everything is ok + CommittingUser: committer, + Verified: true, + Reason: fmt.Sprintf("%s / %s", signer.Name, key.KeyID), + SigningUser: signer, + SigningKey: key, + SigningEmail: email, + } + } + return nil +} + +func hashAndVerifyForKeyID(sig *packet.Signature, payload string, committer *User, keyID, name, email string) *CommitVerification { + if keyID == "" { + return nil + } + keys, err := GetGPGKeysByKeyID(keyID) + if err != nil { + log.Error("GetGPGKeysByKeyID: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.failed_retrieval_gpg_keys", + } + } + if len(keys) == 0 { + return nil + } + for _, key := range keys { + var primaryKeys []*GPGKey + if key.PrimaryKeyID != "" { + primaryKeys, err = GetGPGKeysByKeyID(key.PrimaryKeyID) + if err != nil { + log.Error("GetGPGKeysByKeyID: %v", err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.failed_retrieval_gpg_keys", + } + } + } + + activated, email := checkKeyEmails(email, append([]*GPGKey{key}, primaryKeys...)...) + if !activated { + continue + } + + signer := &User{ + Name: name, + Email: email, + } + if key.OwnerID != 0 { + owner, err := GetUserByID(key.OwnerID) + if err == nil { + signer = owner + } else if !IsErrUserNotExist(err) { + log.Error("Failed to GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err) + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Reason: "gpg.error.no_committer_account", + } + } + } + commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email) + if commitVerification != nil { + return commitVerification + } + } + // This is a bad situation ... 
We have a key id that is in our database but the signature doesn't match. + return &CommitVerification{ + CommittingUser: committer, + Verified: false, + Warning: true, + Reason: BadSignature, + } +} + +// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository +func CalculateTrustStatus(verification *CommitVerification, repository *Repository, keyMap *map[string]bool) (err error) { + if !verification.Verified { + return + } + + // There are several trust models in Gitea + trustModel := repository.GetTrustModel() + + // In the Committer trust model a signature is trusted if it matches the committer + // - it doesn't matter if they're a collaborator, the owner, Gitea or Github + // NB: This model is commit verification only + if trustModel == CommitterTrustModel { + // default to "unmatched" + verification.TrustStatus = "unmatched" + + // We can only verify against users in our database but the default key will match + // against by email if it is not in the db. + if (verification.SigningUser.ID != 0 && + verification.CommittingUser.ID == verification.SigningUser.ID) || + (verification.SigningUser.ID == 0 && verification.CommittingUser.ID == 0 && + verification.SigningUser.Email == verification.CommittingUser.Email) { + verification.TrustStatus = "trusted" + } + return + } + + // Now we drop to the more nuanced trust models... + verification.TrustStatus = "trusted" + + if verification.SigningUser.ID == 0 { + // This commit is signed by the default key - but this key is not assigned to a user in the DB. + + // However in the CollaboratorCommitterTrustModel we cannot mark this as trusted + // unless the default key matches the email of a non-user. + if trustModel == CollaboratorCommitterTrustModel && (verification.CommittingUser.ID != 0 || + verification.SigningUser.Email != verification.CommittingUser.Email) { + verification.TrustStatus = "untrusted" + } + return + } + + var isMember bool + if keyMap != nil { + var has bool + isMember, has = (*keyMap)[verification.SigningKey.KeyID] + if !has { + isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID) + (*keyMap)[verification.SigningKey.KeyID] = isMember + } + } else { + isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID) + } + + if !isMember { + verification.TrustStatus = "untrusted" + if verification.CommittingUser.ID != verification.SigningUser.ID { + // The committing user and the signing user are not the same + // This should be marked as questionable unless the signing user is a collaborator/team member etc. + verification.TrustStatus = "unmatched" + } + } else if trustModel == CollaboratorCommitterTrustModel && verification.CommittingUser.ID != verification.SigningUser.ID { + // The committing user and the signing user are not the same and our trustmodel states that they must match + verification.TrustStatus = "unmatched" + } + + return +} diff --git a/models/gpg_key_common.go b/models/gpg_key_common.go new file mode 100644 index 000000000000..72803625eeb8 --- /dev/null +++ b/models/gpg_key_common.go @@ -0,0 +1,137 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
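CalculateTrustStatus keeps the per-call keyMap so that a page of commits signed with the same key triggers only one IsOwnerMemberCollaborator lookup; ParseCommitsWithSignature seeds an empty map and hands it to every commit in the list. The caching pattern, reduced to a sketch (lookup stands in for Repository.IsOwnerMemberCollaborator and the function name is illustrative):

// isMemberCached reproduces the memoization in CalculateTrustStatus: keyMap is
// reused across the whole commit list, so each signing key is resolved against
// the database at most once per request.
func isMemberCached(keyMap map[string]bool, keyID string, signingUserID int64,
	lookup func(int64) (bool, error)) (bool, error) {
	if isMember, ok := keyMap[keyID]; ok {
		return isMember, nil // already resolved for this signing key
	}
	isMember, err := lookup(signingUserID)
	if err != nil {
		return false, err
	}
	keyMap[keyID] = isMember // cache for the remaining commits
	return isMember, nil
}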
+ +package models + +import ( + "bytes" + "crypto" + "encoding/base64" + "fmt" + "hash" + "io" + "strings" + "time" + + "github.com/keybase/go-crypto/openpgp" + "github.com/keybase/go-crypto/openpgp/armor" + "github.com/keybase/go-crypto/openpgp/packet" +) + +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. +// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ +// _________ +// \_ ___ \ ____ _____ _____ ____ ____ +// / \ \/ / _ \ / \ / \ / _ \ / \ +// \ \___( <_> ) Y Y \ Y Y ( <_> ) | \ +// \______ /\____/|__|_| /__|_| /\____/|___| / +// \/ \/ \/ \/ + +// This file provides common functions relating to GPG Keys + +// checkArmoredGPGKeyString checks if the given key string is a valid GPG armored key. +// The function returns the actual public key on success +func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) { + list, err := openpgp.ReadArmoredKeyRing(strings.NewReader(content)) + if err != nil { + return nil, ErrGPGKeyParsing{err} + } + return list, nil +} + +// base64EncPubKey encode public key content to base 64 +func base64EncPubKey(pubkey *packet.PublicKey) (string, error) { + var w bytes.Buffer + err := pubkey.Serialize(&w) + if err != nil { + return "", err + } + return base64.StdEncoding.EncodeToString(w.Bytes()), nil +} + +func readerFromBase64(s string) (io.Reader, error) { + bs, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return nil, err + } + return bytes.NewBuffer(bs), nil +} + +// base64DecPubKey decode public key content from base 64 +func base64DecPubKey(content string) (*packet.PublicKey, error) { + b, err := readerFromBase64(content) + if err != nil { + return nil, err + } + // Read key + p, err := packet.Read(b) + if err != nil { + return nil, err + } + // Check type + pkey, ok := p.(*packet.PublicKey) + if !ok { + return nil, fmt.Errorf("key is not a public key") + } + return pkey, nil +} + +// getExpiryTime extract the expire time of primary key based on sig +func getExpiryTime(e *openpgp.Entity) time.Time { + expiry := time.Time{} + // Extract self-sign for expire date based on : https://github.com/golang/crypto/blob/master/openpgp/keys.go#L165 + var selfSig *packet.Signature + for _, ident := range e.Identities { + if selfSig == nil { + selfSig = ident.SelfSignature + } else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId { + selfSig = ident.SelfSignature + break + } + } + if selfSig.KeyLifetimeSecs != nil { + expiry = e.PrimaryKey.CreationTime.Add(time.Duration(*selfSig.KeyLifetimeSecs) * time.Second) + } + return expiry +} + +func populateHash(hashFunc crypto.Hash, msg []byte) (hash.Hash, error) { + h := hashFunc.New() + if _, err := h.Write(msg); err != nil { + return nil, err + } + return h, nil +} + +// readArmoredSign read an armored signature block with the given type. 
https://sourcegraph.com/github.com/golang/crypto/-/blob/openpgp/read.go#L24:6-24:17 +func readArmoredSign(r io.Reader) (body io.Reader, err error) { + block, err := armor.Decode(r) + if err != nil { + return + } + if block.Type != openpgp.SignatureType { + return nil, fmt.Errorf("expected '" + openpgp.SignatureType + "', got: " + block.Type) + } + return block.Body, nil +} + +func extractSignature(s string) (*packet.Signature, error) { + r, err := readArmoredSign(strings.NewReader(s)) + if err != nil { + return nil, fmt.Errorf("Failed to read signature armor") + } + p, err := packet.Read(r) + if err != nil { + return nil, fmt.Errorf("Failed to read signature packet") + } + sig, ok := p.(*packet.Signature) + if !ok { + return nil, fmt.Errorf("Packet is not a signature") + } + return sig, nil +} diff --git a/models/gpg_key_import.go b/models/gpg_key_import.go new file mode 100644 index 000000000000..bd1d530eca26 --- /dev/null +++ b/models/gpg_key_import.go @@ -0,0 +1,38 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. +// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ +// .___ __ +// | | _____ ______ ____________/ |_ +// | |/ \\____ \ / _ \_ __ \ __\ +// | | Y Y \ |_> > <_> ) | \/| | +// |___|__|_| / __/ \____/|__| |__| +// \/|__| + +// This file contains functions related to the original import of a key + +// GPGKeyImport the original import of key +type GPGKeyImport struct { + KeyID string `xorm:"pk CHAR(16) NOT NULL"` + Content string `xorm:"TEXT NOT NULL"` +} + +// GetGPGImportByKeyID returns the import public armored key by given KeyID. 
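gpg_key_common.go keeps the low-level plumbing: armored keys are parsed with openpgp.ReadArmoredKeyRing, each public key packet is stored in the gpg_key table as a base64 string via base64EncPubKey, and base64DecPubKey turns it back into a packet.PublicKey before a signature is checked, while gpg_key_import.go preserves the original armored upload keyed by KeyID so it can be retrieved later via GetGPGImportByKeyID. The encode/decode round trip, as a sketch using the same keybase packages (roundTripPublicKey is illustrative):

import (
	"bytes"
	"encoding/base64"
	"fmt"

	"github.com/keybase/go-crypto/openpgp/packet"
)

// roundTripPublicKey serializes a public key packet the way base64EncPubKey
// stores it and decodes it again the way base64DecPubKey does before
// verification.
func roundTripPublicKey(pub *packet.PublicKey) (*packet.PublicKey, error) {
	var buf bytes.Buffer
	if err := pub.Serialize(&buf); err != nil {
		return nil, err
	}
	content := base64.StdEncoding.EncodeToString(buf.Bytes())

	raw, err := base64.StdEncoding.DecodeString(content)
	if err != nil {
		return nil, err
	}
	p, err := packet.Read(bytes.NewReader(raw))
	if err != nil {
		return nil, err
	}
	pkey, ok := p.(*packet.PublicKey)
	if !ok {
		return nil, fmt.Errorf("stored content is not a public key packet")
	}
	return pkey, nil
}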
+func GetGPGImportByKeyID(keyID string) (*GPGKeyImport, error) { + key := new(GPGKeyImport) + has, err := x.ID(keyID).Get(key) + if err != nil { + return nil, err + } else if !has { + return nil, ErrGPGKeyImportNotExist{keyID} + } + return key, nil +} diff --git a/models/gpg_key_test.go b/models/gpg_key_test.go index c9e099093334..be2d8a223bc6 100644 --- a/models/gpg_key_test.go +++ b/models/gpg_key_test.go @@ -103,6 +103,9 @@ MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg== =i9b7 -----END PGP PUBLIC KEY BLOCK-----` keys, err := checkArmoredGPGKeyString(testGPGArmor) + if !assert.NotEmpty(t, keys) { + return + } ekey := keys[0] assert.NoError(t, err, "Could not parse a valid GPG armored key", ekey) @@ -189,6 +192,10 @@ Unknown GPG key with good email } func TestCheckGPGUserEmail(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + _ = AssertExistsAndLoadBean(t, &User{ID: 1}).(*User) + testEmailWithUpperCaseLetters := `-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 @@ -220,11 +227,13 @@ Q0KHb+QcycSgbDx0ZAvdIacuKvBBcbxrsmFUI4LR+oIup0G9gUc0roPvr014jYQL =zHo9 -----END PGP PUBLIC KEY BLOCK-----` - keys, err := AddGPGKey(1, testEmailWithUpperCaseLetters) + keys, err := AddGPGKey(1, testEmailWithUpperCaseLetters, "", "") assert.NoError(t, err) - key := keys[0] - if assert.Len(t, key.Emails, 1) { - assert.Equal(t, "user1@example.com", key.Emails[0].Email) + if assert.NotEmpty(t, keys) { + key := keys[0] + if assert.Len(t, key.Emails, 1) { + assert.Equal(t, "user1@example.com", key.Emails[0].Email) + } } } @@ -374,7 +383,9 @@ epiDVQ== ` keys, err := checkArmoredGPGKeyString(testIssue6599) assert.NoError(t, err) - ekey := keys[0] - expire := getExpiryTime(ekey) - assert.Equal(t, time.Unix(1586105389, 0), expire) + if assert.NotEmpty(t, keys) { + ekey := keys[0] + expire := getExpiryTime(ekey) + assert.Equal(t, time.Unix(1586105389, 0), expire) + } } diff --git a/models/gpg_key_verify.go b/models/gpg_key_verify.go new file mode 100644 index 000000000000..15774dc058e8 --- /dev/null +++ b/models/gpg_key_verify.go @@ -0,0 +1,113 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "strconv" + "time" + + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/log" +) + +// __________________ ________ ____ __. +// / _____/\______ \/ _____/ | |/ _|____ ___.__. +// / \ ___ | ___/ \ ___ | <_/ __ < | | +// \ \_\ \| | \ \_\ \ | | \ ___/\___ | +// \______ /|____| \______ / |____|__ \___ > ____| +// \/ \/ \/ \/\/ +// ____ ____ .__ _____ +// \ \ / /___________|__|/ ____\__.__. +// \ Y // __ \_ __ \ \ __< | | +// \ /\ ___/| | \/ || | \___ | +// \___/ \___ >__| |__||__| / ____| +// \/ \/ + +// This file provides functions relating verifying gpg keys + +// VerifyGPGKey marks a GPG key as verified +func VerifyGPGKey(ownerID int64, keyID, token, signature string) (string, error) { + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return "", err + } + + key := new(GPGKey) + + has, err := sess.Where("owner_id = ? 
AND key_id = ?", ownerID, keyID).Get(key) + if err != nil { + return "", err + } else if !has { + return "", ErrGPGKeyNotExist{} + } + + sig, err := extractSignature(signature) + if err != nil { + return "", ErrGPGInvalidTokenSignature{ + ID: key.KeyID, + Wrapped: err, + } + } + + signer, err := hashAndVerifyWithSubKeys(sig, token, key) + if err != nil { + return "", ErrGPGInvalidTokenSignature{ + ID: key.KeyID, + Wrapped: err, + } + } + if signer == nil { + signer, err = hashAndVerifyWithSubKeys(sig, token+"\n", key) + + if err != nil { + return "", ErrGPGInvalidTokenSignature{ + ID: key.KeyID, + Wrapped: err, + } + } + } + if signer == nil { + signer, err = hashAndVerifyWithSubKeys(sig, token+"\n\n", key) + if err != nil { + return "", ErrGPGInvalidTokenSignature{ + ID: key.KeyID, + Wrapped: err, + } + } + } + + if signer == nil { + log.Error("Unable to validate token signature. Error: %v", err) + return "", ErrGPGInvalidTokenSignature{ + ID: key.KeyID, + } + } + + if signer.PrimaryKeyID != key.KeyID && signer.KeyID != key.KeyID { + return "", ErrGPGKeyNotExist{} + } + + key.Verified = true + if _, err := sess.ID(key.ID).SetExpr("verified", true).Update(new(GPGKey)); err != nil { + return "", err + } + + if err := sess.Commit(); err != nil { + return "", err + } + + return key.KeyID, nil +} + +// VerificationToken returns token for the user that will be valid in minutes (time) +func VerificationToken(user *User, minutes int) string { + return base.EncodeSha256( + time.Now().Truncate(1*time.Minute).Add(time.Duration(minutes)*time.Minute).Format(time.RFC1123Z) + ":" + + user.CreatedUnix.FormatLong() + ":" + + user.Name + ":" + + user.Email + ":" + + strconv.FormatInt(user.ID, 10)) +} diff --git a/models/index.go b/models/index.go new file mode 100644 index 000000000000..18db13c490c7 --- /dev/null +++ b/models/index.go @@ -0,0 +1,113 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "errors" + "fmt" + + "code.gitea.io/gitea/modules/setting" +) + +// ResourceIndex represents a resource index which could be used as issue/release and others +// We can create different tables i.e. issue_index, release_index and etc. +type ResourceIndex struct { + GroupID int64 `xorm:"unique"` + MaxIndex int64 `xorm:"index"` +} + +// IssueIndex represents the issue index table +type IssueIndex ResourceIndex + +// upsertResourceIndex the function will not return until it acquires the lock or receives an error. +func upsertResourceIndex(e Engine, tableName string, groupID int64) (err error) { + // An atomic UPSERT operation (INSERT/UPDATE) is the only operation + // that ensures that the key is actually locked. + switch { + case setting.Database.UseSQLite3 || setting.Database.UsePostgreSQL: + _, err = e.Exec(fmt.Sprintf("INSERT INTO %s (group_id, max_index) "+ + "VALUES (?,1) ON CONFLICT (group_id) DO UPDATE SET max_index = %s.max_index+1", + tableName, tableName), groupID) + case setting.Database.UseMySQL: + _, err = e.Exec(fmt.Sprintf("INSERT INTO %s (group_id, max_index) "+ + "VALUES (?,1) ON DUPLICATE KEY UPDATE max_index = max_index+1", tableName), + groupID) + case setting.Database.UseMSSQL: + // https://weblogs.sqlteam.com/dang/2009/01/31/upsert-race-condition-with-merge/ + _, err = e.Exec(fmt.Sprintf("MERGE %s WITH (HOLDLOCK) as target "+ + "USING (SELECT ? 
AS group_id) AS src "+ + "ON src.group_id = target.group_id "+ + "WHEN MATCHED THEN UPDATE SET target.max_index = target.max_index+1 "+ + "WHEN NOT MATCHED THEN INSERT (group_id, max_index) "+ + "VALUES (src.group_id, 1);", tableName), + groupID) + default: + return fmt.Errorf("database type not supported") + } + return +} + +var ( + // ErrResouceOutdated represents an error when request resource outdated + ErrResouceOutdated = errors.New("resource outdated") + // ErrGetResourceIndexFailed represents an error when resource index retries 3 times + ErrGetResourceIndexFailed = errors.New("get resource index failed") +) + +const ( + maxDupIndexAttempts = 3 +) + +// GetNextResourceIndex retried 3 times to generate a resource index +func GetNextResourceIndex(tableName string, groupID int64) (int64, error) { + for i := 0; i < maxDupIndexAttempts; i++ { + idx, err := getNextResourceIndex(tableName, groupID) + if err == ErrResouceOutdated { + continue + } + if err != nil { + return 0, err + } + return idx, nil + } + return 0, ErrGetResourceIndexFailed +} + +// deleteResouceIndex delete resource index +func deleteResouceIndex(e Engine, tableName string, groupID int64) error { + _, err := e.Exec(fmt.Sprintf("DELETE FROM %s WHERE group_id=?", tableName), groupID) + return err +} + +// getNextResourceIndex return the next index +func getNextResourceIndex(tableName string, groupID int64) (int64, error) { + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return 0, err + } + var preIdx int64 + _, err := sess.SQL(fmt.Sprintf("SELECT max_index FROM %s WHERE group_id = ?", tableName), groupID).Get(&preIdx) + if err != nil { + return 0, err + } + + if err := upsertResourceIndex(sess, tableName, groupID); err != nil { + return 0, err + } + + var curIdx int64 + has, err := sess.SQL(fmt.Sprintf("SELECT max_index FROM %s WHERE group_id = ? AND max_index=?", tableName), groupID, preIdx+1).Get(&curIdx) + if err != nil { + return 0, err + } + if !has { + return 0, ErrResouceOutdated + } + if err := sess.Commit(); err != nil { + return 0, err + } + return curIdx, nil +} diff --git a/models/index_test.go b/models/index_test.go new file mode 100644 index 000000000000..40e570ad9fa7 --- /dev/null +++ b/models/index_test.go @@ -0,0 +1,27 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
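The new ResourceIndex/IssueIndex table replaces the old coalesce(MAX(`index`),0)+1 insert for issue numbers: each allocation is a single atomic UPSERT on group_id (with per-database SQL for SQLite/PostgreSQL, MySQL and MSSQL), after which the caller re-reads max_index and only accepts it if it equals the previously observed value plus one; if a concurrent writer won the race, getNextResourceIndex returns ErrResouceOutdated and GetNextResourceIndex retries up to three times before failing. Because the helper is keyed by table name and group id, other counters can reuse it; a hypothetical release_index variant would look like the sketch below (the type, the table, and its sync are assumptions for illustration, not part of this change):

import "fmt"

// ReleaseIndex would mirror IssueIndex: one row per repository holding the
// highest number handed out so far. Purely illustrative.
type ReleaseIndex ResourceIndex

// nextReleaseIndex allocates the next per-repository release number with the
// same contention-retrying helper introduced for issues.
func nextReleaseIndex(repoID int64) (int64, error) {
	idx, err := GetNextResourceIndex("release_index", repoID)
	if err != nil {
		return 0, fmt.Errorf("generate release index failed: %v", err)
	}
	return idx, nil
}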
+ +package models + +import ( + "fmt" + "sync" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResourceIndex(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + var wg sync.WaitGroup + for i := 0; i < 100; i++ { + wg.Add(1) + go func(i int) { + testInsertIssue(t, fmt.Sprintf("issue %d", i+1), "my issue", 0) + wg.Done() + }(i) + } + wg.Wait() +} diff --git a/models/issue.go b/models/issue.go index 6912df6c28ac..225dfee20f09 100644 --- a/models/issue.go +++ b/models/issue.go @@ -78,9 +78,8 @@ var ( ) const ( - issueTasksRegexpStr = `(^\s*[-*]\s\[[\sxX]\]\s.)|(\n\s*[-*]\s\[[\sxX]\]\s.)` - issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[xX]\]\s.)|(\n\s*[-*]\s\[[xX]\]\s.)` - issueMaxDupIndexAttempts = 3 + issueTasksRegexpStr = `(^\s*[-*]\s\[[\sxX]\]\s.)|(\n\s*[-*]\s\[[\sxX]\]\s.)` + issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[xX]\]\s.)|(\n\s*[-*]\s\[[xX]\]\s.)` ) func init() { @@ -648,8 +647,10 @@ func (issue *Issue) doChangeStatus(e *xorm.Session, doer *User, isMergePull bool } // Update issue count of milestone - if err := updateMilestoneClosedNum(e, issue.MilestoneID); err != nil { - return nil, err + if issue.MilestoneID > 0 { + if err := updateMilestoneCounters(e, issue.MilestoneID); err != nil { + return nil, err + } } if err := issue.updateClosedNum(e); err != nil { @@ -896,23 +897,19 @@ func newIssue(e *xorm.Session, doer *User, opts NewIssueOptions) (err error) { } } - // Milestone validation should happen before insert actual object. - if _, err := e.SetExpr("`index`", "coalesce(MAX(`index`),0)+1"). - Where("repo_id=?", opts.Issue.RepoID). - Insert(opts.Issue); err != nil { - return ErrNewIssueInsert{err} + if opts.Issue.Index <= 0 { + return fmt.Errorf("no issue index provided") + } + if opts.Issue.ID > 0 { + return fmt.Errorf("issue exist") } - inserted, err := getIssueByID(e, opts.Issue.ID) - if err != nil { + if _, err := e.Insert(opts.Issue); err != nil { return err } - // Patch Index with the value calculated by the database - opts.Issue.Index = inserted.Index - if opts.Issue.MilestoneID > 0 { - if _, err = e.Exec("UPDATE `milestone` SET num_issues=num_issues+1 WHERE id=?", opts.Issue.MilestoneID); err != nil { + if err := updateMilestoneCounters(e, opts.Issue.MilestoneID); err != nil { return err } @@ -987,24 +984,13 @@ func newIssue(e *xorm.Session, doer *User, opts NewIssueOptions) (err error) { // NewIssue creates new issue with labels for repository. func NewIssue(repo *Repository, issue *Issue, labelIDs []int64, uuids []string) (err error) { - // Retry several times in case INSERT fails due to duplicate key for (repo_id, index); see #7887 - i := 0 - for { - if err = newIssueAttempt(repo, issue, labelIDs, uuids); err == nil { - return nil - } - if !IsErrNewIssueInsert(err) { - return err - } - if i++; i == issueMaxDupIndexAttempts { - break - } - log.Error("NewIssue: error attempting to insert the new issue; will retry. Original error: %v", err) + idx, err := GetNextResourceIndex("issue_index", repo.ID) + if err != nil { + return fmt.Errorf("generate issue index failed: %v", err) } - return fmt.Errorf("NewIssue: too many errors attempting to insert the new issue. 
Last error was: %v", err) -} -func newIssueAttempt(repo *Repository, issue *Issue, labelIDs []int64, uuids []string) (err error) { + issue.Index = idx + sess := x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { @@ -1032,6 +1018,9 @@ func newIssueAttempt(repo *Repository, issue *Issue, labelIDs []int64, uuids []s // GetIssueByIndex returns raw issue without loading attributes by index in a repository. func GetIssueByIndex(repoID, index int64) (*Issue, error) { + if index < 1 { + return nil, ErrIssueNotExist{} + } issue := &Issue{ RepoID: repoID, Index: index, @@ -1086,7 +1075,7 @@ func getIssuesByIDs(e Engine, issueIDs []int64) ([]*Issue, error) { func getIssueIDsByRepoID(e Engine, repoID int64) ([]int64, error) { ids := make([]int64, 0, 10) - err := e.Table("issue").Where("repo_id = ?", repoID).Find(&ids) + err := e.Table("issue").Cols("id").Where("repo_id = ?", repoID).Find(&ids) return ids, err } @@ -1116,6 +1105,7 @@ type IssuesOptions struct { LabelIDs []int64 IncludedLabelNames []string ExcludedLabelNames []string + IncludeMilestones []string SortType string IssueIDs []int64 UpdatedAfterUnix int64 @@ -1143,9 +1133,18 @@ func sortIssuesSession(sess *xorm.Session, sortType string, priorityRepoID int64 sess.Desc("issue.priority") case "nearduedate": // 253370764800 is 01/01/9999 @ 12:00am (UTC) - sess.OrderBy("CASE WHEN issue.deadline_unix = 0 THEN 253370764800 ELSE issue.deadline_unix END ASC") + sess.Join("LEFT", "milestone", "issue.milestone_id = milestone.id"). + OrderBy("CASE " + + "WHEN issue.deadline_unix = 0 AND (milestone.deadline_unix = 0 OR milestone.deadline_unix IS NULL) THEN 253370764800 " + + "WHEN milestone.deadline_unix = 0 OR milestone.deadline_unix IS NULL THEN issue.deadline_unix " + + "WHEN milestone.deadline_unix < issue.deadline_unix OR issue.deadline_unix = 0 THEN milestone.deadline_unix " + + "ELSE issue.deadline_unix END ASC") case "farduedate": - sess.Desc("issue.deadline_unix") + sess.Join("LEFT", "milestone", "issue.milestone_id = milestone.id"). + OrderBy("CASE " + + "WHEN milestone.deadline_unix IS NULL THEN issue.deadline_unix " + + "WHEN milestone.deadline_unix < issue.deadline_unix OR issue.deadline_unix = 0 THEN milestone.deadline_unix " + + "ELSE issue.deadline_unix END DESC") case "priorityrepo": sess.OrderBy("CASE WHEN issue.repo_id = " + strconv.FormatInt(priorityRepoID, 10) + " THEN 1 ELSE 2 END, issue.created_unix DESC") default: @@ -1248,6 +1247,13 @@ func (opts *IssuesOptions) setupSession(sess *xorm.Session) { if len(opts.ExcludedLabelNames) > 0 { sess.And(builder.NotIn("issue.id", BuildLabelNamesIssueIDsCondition(opts.ExcludedLabelNames))) } + + if len(opts.IncludeMilestones) > 0 { + sess.In("issue.milestone_id", + builder.Select("id"). + From("milestone"). 
+ Where(builder.In("name", opts.IncludeMilestones))) + } } func applyReposCondition(sess *xorm.Session, repoIDs []int64) *xorm.Session { diff --git a/models/issue_assignees_test.go b/models/issue_assignees_test.go index 79257013f843..e0359b0b9faa 100644 --- a/models/issue_assignees_test.go +++ b/models/issue_assignees_test.go @@ -61,6 +61,11 @@ func TestUpdateAssignee(t *testing.T) { } func TestMakeIDsFromAPIAssigneesToAdd(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + _ = AssertExistsAndLoadBean(t, &User{ID: 1}).(*User) + _ = AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) + IDs, err := MakeIDsFromAPIAssigneesToAdd("", []string{""}) assert.NoError(t, err) assert.Equal(t, []int64{}, IDs) diff --git a/models/issue_comment.go b/models/issue_comment.go index 26bf122dc983..4c5b77ff8e55 100644 --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -184,7 +184,7 @@ type Comment struct { RefRepoID int64 `xorm:"index"` // Repo where the referencing RefIssueID int64 `xorm:"index"` RefCommentID int64 `xorm:"index"` // 0 if origin is Issue title or content (or PR's) - RefAction references.XRefAction `xorm:"SMALLINT"` // What hapens if RefIssueID resolves + RefAction references.XRefAction `xorm:"SMALLINT"` // What happens if RefIssueID resolves RefIsPull bool RefRepo *Repository `xorm:"-"` @@ -762,6 +762,8 @@ func updateCommentInfos(e *xorm.Session, opts *CreateCommentOptions, comment *Co } } fallthrough + case CommentTypeReview: + fallthrough case CommentTypeComment: if _, err = e.Exec("UPDATE `issue` SET num_comments=num_comments+1 WHERE id=?", opts.Issue.ID); err != nil { return err @@ -1226,7 +1228,7 @@ func UpdateCommentsMigrationsByType(tp structs.GitServiceType, originalAuthorID return err } -// CreatePushPullComment create push code to pull base commend +// CreatePushPullComment create push code to pull base comment func CreatePushPullComment(pusher *User, pr *PullRequest, oldCommitID, newCommitID string) (comment *Comment, err error) { if pr.HasMerged || oldCommitID == "" || newCommitID == "" { return nil, nil @@ -1260,7 +1262,7 @@ func CreatePushPullComment(pusher *User, pr *PullRequest, oldCommitID, newCommit return } -// getCommitsFromRepo get commit IDs from repo in betwern oldCommitID and newCommitID +// getCommitsFromRepo get commit IDs from repo in between oldCommitID and newCommitID // isForcePush will be true if oldCommit isn't on the branch // Commit on baseBranch will skip func getCommitIDsFromRepo(repo *Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) { diff --git a/models/issue_dependency.go b/models/issue_dependency.go index 6ecaf3432f89..4008008f3474 100644 --- a/models/issue_dependency.go +++ b/models/issue_dependency.go @@ -128,7 +128,7 @@ func issueNoDependenciesLeft(e Engine, issue *Issue) (bool, error) { return !exists, err } -// IsDependenciesEnabled returns if dependecies are enabled and returns the default setting if not set. +// IsDependenciesEnabled returns if dependencies are enabled and returns the default setting if not set. 
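Two query changes ride along with the index work: nearduedate and farduedate now join milestone and order by the effective deadline, i.e. the earlier of the issue's own deadline and its milestone's, with 0/NULL treated as "no deadline" (253370764800 is the 01/01/9999 UTC sentinel); and IssuesOptions gains IncludeMilestones, which restricts results to issues whose milestone_id matches a sub-select over milestone names. A usage sketch, assuming the package's existing Issues(opts) query entry point (the milestone names are examples only):

// dueSoonInMilestones is illustrative: restrict the search to issues attached
// to either of two example milestones and order them by the combined
// issue/milestone due date. Only the two options shown come from the diff.
func dueSoonInMilestones() ([]*Issue, error) {
	opts := &IssuesOptions{
		IncludeMilestones: []string{"v1.15", "v1.16"}, // example names
		SortType:          "nearduedate",
	}
	return Issues(opts)
}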
func (repo *Repository) IsDependenciesEnabled() bool { return repo.isDependenciesEnabled(x) } diff --git a/models/issue_label_test.go b/models/issue_label_test.go index fc6590e96859..3dde1a40696a 100644 --- a/models/issue_label_test.go +++ b/models/issue_label_test.go @@ -135,7 +135,7 @@ func TestGetLabelsByRepoID(t *testing.T) { testSuccess(1, "default", []int64{1, 2}) } -// Org vrsions +// Org versions func TestGetLabelInOrgByName(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) diff --git a/models/issue_milestone.go b/models/issue_milestone.go index 5aa83ea691d5..5e934cde0a02 100644 --- a/models/issue_milestone.go +++ b/models/issue_milestone.go @@ -129,8 +129,12 @@ func GetMilestoneByRepoIDANDName(repoID int64, name string) (*Milestone, error) // GetMilestoneByID returns the milestone via id . func GetMilestoneByID(id int64) (*Milestone, error) { + return getMilestoneByID(x, id) +} + +func getMilestoneByID(e Engine, id int64) (*Milestone, error) { var m Milestone - has, err := x.ID(id).Get(&m) + has, err := e.ID(id).Get(&m) if err != nil { return nil, err } else if !has { @@ -155,10 +159,6 @@ func UpdateMilestone(m *Milestone, oldIsClosed bool) error { return err } - if err := updateMilestoneCompleteness(sess, m.ID); err != nil { - return err - } - // if IsClosed changed, update milestone numbers of repository if oldIsClosed != m.IsClosed { if err := updateRepoMilestoneNum(sess, m.RepoID); err != nil { @@ -171,23 +171,31 @@ func UpdateMilestone(m *Milestone, oldIsClosed bool) error { func updateMilestone(e Engine, m *Milestone) error { m.Name = strings.TrimSpace(m.Name) - _, err := e.ID(m.ID).AllCols(). + _, err := e.ID(m.ID).AllCols().Update(m) + if err != nil { + return err + } + return updateMilestoneCounters(e, m.ID) +} + +// updateMilestoneCounters calculates NumIssues, NumClosesIssues and Completeness +func updateMilestoneCounters(e Engine, id int64) error { + _, err := e.ID(id). SetExpr("num_issues", builder.Select("count(*)").From("issue").Where( - builder.Eq{"milestone_id": m.ID}, + builder.Eq{"milestone_id": id}, )). SetExpr("num_closed_issues", builder.Select("count(*)").From("issue").Where( builder.Eq{ - "milestone_id": m.ID, + "milestone_id": id, "is_closed": true, }, )). 
- Update(m) - return err -} - -func updateMilestoneCompleteness(e Engine, milestoneID int64) error { - _, err := e.Exec("UPDATE `milestone` SET completeness=100*num_closed_issues/(CASE WHEN num_issues > 0 THEN num_issues ELSE 1 END) WHERE id=?", - milestoneID, + Update(&Milestone{}) + if err != nil { + return err + } + _, err = e.Exec("UPDATE `milestone` SET completeness=100*num_closed_issues/(CASE WHEN num_issues > 0 THEN num_issues ELSE 1 END) WHERE id=?", + id, ) return err } @@ -256,25 +264,15 @@ func changeMilestoneAssign(e *xorm.Session, doer *User, issue *Issue, oldMilesto } if oldMilestoneID > 0 { - if err := updateMilestoneTotalNum(e, oldMilestoneID); err != nil { + if err := updateMilestoneCounters(e, oldMilestoneID); err != nil { return err } - if issue.IsClosed { - if err := updateMilestoneClosedNum(e, oldMilestoneID); err != nil { - return err - } - } } if issue.MilestoneID > 0 { - if err := updateMilestoneTotalNum(e, issue.MilestoneID); err != nil { + if err := updateMilestoneCounters(e, issue.MilestoneID); err != nil { return err } - if issue.IsClosed { - if err := updateMilestoneClosedNum(e, issue.MilestoneID); err != nil { - return err - } - } } if oldMilestoneID > 0 || issue.MilestoneID > 0 { @@ -622,29 +620,6 @@ func updateRepoMilestoneNum(e Engine, repoID int64) error { return err } -func updateMilestoneTotalNum(e Engine, milestoneID int64) (err error) { - if _, err = e.Exec("UPDATE `milestone` SET num_issues=(SELECT count(*) FROM issue WHERE milestone_id=?) WHERE id=?", - milestoneID, - milestoneID, - ); err != nil { - return - } - - return updateMilestoneCompleteness(e, milestoneID) -} - -func updateMilestoneClosedNum(e Engine, milestoneID int64) (err error) { - if _, err = e.Exec("UPDATE `milestone` SET num_closed_issues=(SELECT count(*) FROM issue WHERE milestone_id=? AND is_closed=?) 
WHERE id=?", - milestoneID, - true, - milestoneID, - ); err != nil { - return - } - - return updateMilestoneCompleteness(e, milestoneID) -} - // _____ _ _ _____ _ // |_ _| __ __ _ ___| | _____ __| |_ _(_)_ __ ___ ___ ___ // | || '__/ _` |/ __| |/ / _ \/ _` | | | | | '_ ` _ \ / _ \/ __| diff --git a/models/issue_milestone_test.go b/models/issue_milestone_test.go index af264aa27457..5406129884fb 100644 --- a/models/issue_milestone_test.go +++ b/models/issue_milestone_test.go @@ -215,7 +215,7 @@ func TestChangeMilestoneStatus(t *testing.T) { CheckConsistencyFor(t, &Repository{ID: milestone.RepoID}, &Milestone{}) } -func TestUpdateMilestoneClosedNum(t *testing.T) { +func TestUpdateMilestoneCounters(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) issue := AssertExistsAndLoadBean(t, &Issue{MilestoneID: 1}, "is_closed=0").(*Issue) @@ -224,14 +224,14 @@ func TestUpdateMilestoneClosedNum(t *testing.T) { issue.ClosedUnix = timeutil.TimeStampNow() _, err := x.ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue) assert.NoError(t, err) - assert.NoError(t, updateMilestoneClosedNum(x, issue.MilestoneID)) + assert.NoError(t, updateMilestoneCounters(x, issue.MilestoneID)) CheckConsistencyFor(t, &Milestone{}) issue.IsClosed = false issue.ClosedUnix = 0 _, err = x.ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue) assert.NoError(t, err) - assert.NoError(t, updateMilestoneClosedNum(x, issue.MilestoneID)) + assert.NoError(t, updateMilestoneCounters(x, issue.MilestoneID)) CheckConsistencyFor(t, &Milestone{}) } diff --git a/models/issue_stopwatch_test.go b/models/issue_stopwatch_test.go index 6fc2b1ce5d71..b6af5e93b51a 100644 --- a/models/issue_stopwatch_test.go +++ b/models/issue_stopwatch_test.go @@ -67,7 +67,7 @@ func TestCreateOrStopIssueStopwatch(t *testing.T) { assert.NoError(t, CreateOrStopIssueStopwatch(user3, issue1)) sw := AssertExistsAndLoadBean(t, &Stopwatch{UserID: 3, IssueID: 1}).(*Stopwatch) - assert.Equal(t, true, sw.CreatedUnix <= timeutil.TimeStampNow()) + assert.LessOrEqual(t, sw.CreatedUnix, timeutil.TimeStampNow()) assert.NoError(t, CreateOrStopIssueStopwatch(user2, issue2)) AssertNotExistsBean(t, &Stopwatch{UserID: 2, IssueID: 2}) diff --git a/models/issue_test.go b/models/issue_test.go index c21b1d6ae98e..f2c9b7a68f8e 100644 --- a/models/issue_test.go +++ b/models/issue_test.go @@ -36,6 +36,14 @@ func TestIssue_ReplaceLabels(t *testing.T) { testSuccess(1, []int64{}) } +func Test_GetIssueIDsByRepoID(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + ids, err := GetIssueIDsByRepoID(1) + assert.NoError(t, err) + assert.Len(t, ids, 5) +} + func TestIssueAPIURL(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) issue := AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) @@ -337,37 +345,45 @@ func TestGetRepoIDsForIssuesOptions(t *testing.T) { } } -func testInsertIssue(t *testing.T, title, content string) { - repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) - user := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) - - issue := Issue{ - RepoID: repo.ID, - PosterID: user.ID, - Title: title, - Content: content, - } - err := NewIssue(repo, &issue, nil, nil) - assert.NoError(t, err) - +func testInsertIssue(t *testing.T, title, content string, expectIndex int64) *Issue { var newIssue Issue - has, err := x.ID(issue.ID).Get(&newIssue) - assert.NoError(t, err) - assert.True(t, has) - assert.EqualValues(t, issue.Title, newIssue.Title) - assert.EqualValues(t, issue.Content, newIssue.Content) - // there are 5 issues and max index is 5 on 
repository 1, so this one should 6 - assert.EqualValues(t, 6, newIssue.Index) + t.Run(title, func(t *testing.T) { + repo := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository) + user := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) + + issue := Issue{ + RepoID: repo.ID, + PosterID: user.ID, + Title: title, + Content: content, + } + err := NewIssue(repo, &issue, nil, nil) + assert.NoError(t, err) - _, err = x.ID(issue.ID).Delete(new(Issue)) - assert.NoError(t, err) + has, err := x.ID(issue.ID).Get(&newIssue) + assert.NoError(t, err) + assert.True(t, has) + assert.EqualValues(t, issue.Title, newIssue.Title) + assert.EqualValues(t, issue.Content, newIssue.Content) + if expectIndex > 0 { + assert.EqualValues(t, expectIndex, newIssue.Index) + } + }) + return &newIssue } func TestIssue_InsertIssue(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) - testInsertIssue(t, "my issue1", "special issue's comments?") - testInsertIssue(t, `my issue2, this is my son's love \n \r \ `, "special issue's '' comments?") + // there are 5 issues and max index is 5 on repository 1, so this one should 6 + issue := testInsertIssue(t, "my issue1", "special issue's comments?", 6) + _, err := x.ID(issue.ID).Delete(new(Issue)) + assert.NoError(t, err) + + issue = testInsertIssue(t, `my issue2, this is my son's love \n \r \ `, "special issue's '' comments?", 7) + _, err = x.ID(issue.ID).Delete(new(Issue)) + assert.NoError(t, err) + } func TestIssue_ResolveMentions(t *testing.T) { diff --git a/models/issue_watch_test.go b/models/issue_watch_test.go index 762b1486c646..013ca67e1efc 100644 --- a/models/issue_watch_test.go +++ b/models/issue_watch_test.go @@ -32,7 +32,7 @@ func TestGetIssueWatch(t *testing.T) { iw, exists, err := GetIssueWatch(2, 2) assert.True(t, exists) assert.NoError(t, err) - assert.EqualValues(t, false, iw.IsWatching) + assert.False(t, iw.IsWatching) _, exists, err = GetIssueWatch(3, 1) assert.False(t, exists) diff --git a/models/issue_xref.go b/models/issue_xref.go index 8d3caba8c0b6..fc8dd70f775f 100644 --- a/models/issue_xref.go +++ b/models/issue_xref.go @@ -290,6 +290,9 @@ func CommentTypeIsRef(t CommentType) bool { // RefCommentHTMLURL returns the HTML URL for the comment that created this reference func (comment *Comment) RefCommentHTMLURL() string { + if comment.RefCommentID == 0 { + return "" + } if err := comment.LoadRefComment(); err != nil { // Silently dropping errors :unamused: log.Error("LoadRefComment(%d): %v", comment.RefCommentID, err) return "" diff --git a/models/issue_xref_test.go b/models/issue_xref_test.go index 936d1124be47..a2d1a4b11e1f 100644 --- a/models/issue_xref_test.go +++ b/models/issue_xref_test.go @@ -25,7 +25,7 @@ func TestXRef_AddCrossReferences(t *testing.T) { ref := AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypePullRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) - assert.Equal(t, true, ref.RefIsPull) + assert.True(t, ref.RefIsPull) assert.Equal(t, references.XRefActionCloses, ref.RefAction) // Comment on PR to reopen issue #1 @@ -34,7 +34,7 @@ func TestXRef_AddCrossReferences(t *testing.T) { ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID}).(*Comment) assert.Equal(t, CommentTypeCommentRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) - assert.Equal(t, true, ref.RefIsPull) + assert.True(t, ref.RefIsPull) assert.Equal(t, references.XRefActionReopens, ref.RefAction) // Issue mentioning issue #1 @@ 
-43,7 +43,7 @@ func TestXRef_AddCrossReferences(t *testing.T) { ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypeIssueRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) - assert.Equal(t, false, ref.RefIsPull) + assert.False(t, ref.RefIsPull) assert.Equal(t, references.XRefActionNone, ref.RefAction) // Issue #4 to test against @@ -55,7 +55,7 @@ func TestXRef_AddCrossReferences(t *testing.T) { ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypeIssueRef, ref.Type) assert.Equal(t, i.RepoID, ref.RefRepoID) - assert.Equal(t, false, ref.RefIsPull) + assert.False(t, ref.RefIsPull) assert.Equal(t, references.XRefActionNone, ref.RefAction) // Cross-reference to issue #4 with no permission @@ -125,12 +125,27 @@ func TestXRef_ResolveCrossReferences(t *testing.T) { func testCreateIssue(t *testing.T, repo, doer int64, title, content string, ispull bool) *Issue { r := AssertExistsAndLoadBean(t, &Repository{ID: repo}).(*Repository) d := AssertExistsAndLoadBean(t, &User{ID: doer}).(*User) - i := &Issue{RepoID: r.ID, PosterID: d.ID, Poster: d, Title: title, Content: content, IsPull: ispull} + + idx, err := GetNextResourceIndex("issue_index", r.ID) + assert.NoError(t, err) + i := &Issue{ + RepoID: r.ID, + PosterID: d.ID, + Poster: d, + Title: title, + Content: content, + IsPull: ispull, + Index: idx, + } sess := x.NewSession() defer sess.Close() + assert.NoError(t, sess.Begin()) - _, err := sess.SetExpr("`index`", "coalesce(MAX(`index`),0)+1").Where("repo_id=?", repo).Insert(i) + err = newIssue(sess, d, NewIssueOptions{ + Repo: r, + Issue: i, + }) assert.NoError(t, err) i, err = getIssueByID(sess, i.ID) assert.NoError(t, err) diff --git a/models/list_options.go b/models/list_options.go index 9cccd05465af..ff02933f9bac 100644 --- a/models/list_options.go +++ b/models/list_options.go @@ -41,7 +41,7 @@ func (opts *ListOptions) setEnginePagination(e Engine) Engine { func (opts *ListOptions) GetStartEnd() (start, end int) { opts.setDefaultValues() start = (opts.Page - 1) * opts.PageSize - end = start + opts.Page + end = start + opts.PageSize return } diff --git a/models/login_source.go b/models/login_source.go index fd977e20a5d7..f9bd496b3add 100644 --- a/models/login_source.go +++ b/models/login_source.go @@ -18,9 +18,11 @@ import ( "code.gitea.io/gitea/modules/auth/oauth2" "code.gitea.io/gitea/modules/auth/pam" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/secret" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" + gouuid "github.com/google/uuid" jsoniter "github.com/json-iterator/go" "xorm.io/xorm" @@ -68,6 +70,17 @@ var ( _ convert.Conversion = &SSPIConfig{} ) +// jsonUnmarshalIgnoreErroneousBOM - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's +// possible that a Blob may gain an unwanted prefix of 0xff 0xfe. +func jsonUnmarshalIgnoreErroneousBOM(bs []byte, v interface{}) error { + json := jsoniter.ConfigCompatibleWithStandardLibrary + err := json.Unmarshal(bs, &v) + if err != nil && len(bs) > 2 && bs[0] == 0xff && bs[1] == 0xfe { + err = json.Unmarshal(bs[2:], &v) + } + return err +} + // LDAPConfig holds configuration for LDAP login source. type LDAPConfig struct { *ldap.Source @@ -75,12 +88,25 @@ type LDAPConfig struct { // FromDB fills up a LDAPConfig from serialized format. 
func (cfg *LDAPConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + err := jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) + if err != nil { + return err + } + if cfg.BindPasswordEncrypt != "" { + cfg.BindPassword, err = secret.DecryptSecret(setting.SecretKey, cfg.BindPasswordEncrypt) + cfg.BindPasswordEncrypt = "" + } + return err } // ToDB exports a LDAPConfig to a serialized format. func (cfg *LDAPConfig) ToDB() ([]byte, error) { + var err error + cfg.BindPasswordEncrypt, err = secret.EncryptSecret(setting.SecretKey, cfg.BindPassword) + if err != nil { + return nil, err + } + cfg.BindPassword = "" json := jsoniter.ConfigCompatibleWithStandardLibrary return json.Marshal(cfg) } @@ -103,8 +129,7 @@ type SMTPConfig struct { // FromDB fills up an SMTPConfig from serialized format. func (cfg *SMTPConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, cfg) } // ToDB exports an SMTPConfig to a serialized format. @@ -116,12 +141,12 @@ func (cfg *SMTPConfig) ToDB() ([]byte, error) { // PAMConfig holds configuration for the PAM login source. type PAMConfig struct { ServiceName string // pam service (e.g. system-auth) + EmailDomain string } // FromDB fills up a PAMConfig from serialized format. func (cfg *PAMConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, cfg) } // ToDB exports a PAMConfig to a serialized format. @@ -142,8 +167,7 @@ type OAuth2Config struct { // FromDB fills up an OAuth2Config from serialized format. func (cfg *OAuth2Config) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, cfg) } // ToDB exports an SMTPConfig to a serialized format. @@ -163,8 +187,7 @@ type SSPIConfig struct { // FromDB fills up an SSPIConfig from serialized format. func (cfg *SSPIConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, cfg) } // ToDB exports an SSPIConfig to a serialized format. 
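A minimal, self-contained sketch (assuming the standard library encoding/json in place of jsoniter, and a hypothetical LDAP-style config struct) of the retry-on-BOM behaviour that the jsonUnmarshalIgnoreErroneousBOM helper above implements:

package main

import (
	"encoding/json"
	"fmt"
)

// unmarshalIgnoreErroneousBOM retries the unmarshal without the first two bytes
// when they are the erroneous 0xff 0xfe prefix that the xorm bug may prepend.
func unmarshalIgnoreErroneousBOM(bs []byte, v interface{}) error {
	err := json.Unmarshal(bs, v)
	if err != nil && len(bs) > 2 && bs[0] == 0xff && bs[1] == 0xfe {
		err = json.Unmarshal(bs[2:], v)
	}
	return err
}

func main() {
	type ldapConfig struct {
		Host string `json:"Host"`
	}
	// Simulate a serialized config blob that gained the unwanted prefix.
	blob := append([]byte{0xff, 0xfe}, []byte(`{"Host":"ldap.example.com"}`)...)

	var cfg ldapConfig
	if err := unmarshalIgnoreErroneousBOM(blob, &cfg); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	fmt.Println(cfg.Host) // ldap.example.com
}

Note that the prefix is only skipped after a failed parse, so well-formed blobs are never altered.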
@@ -320,7 +343,7 @@ func CreateLoginSource(source *LoginSource) error { } else if has { return ErrLoginSourceAlreadyExist{source.Name} } - // Synchronization is only aviable with LDAP for now + // Synchronization is only available with LDAP for now if !source.IsLDAP() { source.IsSyncEnabled = false } @@ -696,15 +719,26 @@ func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMCon // Allow PAM sources with `@` in their name, like from Active Directory username := pamLogin + email := pamLogin idx := strings.Index(pamLogin, "@") if idx > -1 { username = pamLogin[:idx] } + if ValidateEmail(email) != nil { + if cfg.EmailDomain != "" { + email = fmt.Sprintf("%s@%s", username, cfg.EmailDomain) + } else { + email = fmt.Sprintf("%s@%s", username, setting.Service.NoReplyAddress) + } + if ValidateEmail(email) != nil { + email = gouuid.New().String() + "@localhost" + } + } user = &User{ LowerName: strings.ToLower(username), Name: username, - Email: pamLogin, + Email: email, Passwd: password, LoginType: LoginPAM, LoginSource: sourceID, @@ -828,7 +862,11 @@ func UserSignIn(username, password string) (*User, error) { return authUser, nil } - log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err) + if IsErrUserNotExist(err) { + log.Debug("Failed to login '%s' via '%s': %v", username, source.Name, err) + } else { + log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err) + } } return nil, ErrUserNotExist{user.ID, user.Name, 0} diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index c54c383fb810..7a4193199c2f 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -187,7 +187,7 @@ var migrations = []Migration{ // v122 -> v123 NewMigration("Add Require Signed Commits to ProtectedBranch", addRequireSignedCommits), // v123 -> v124 - NewMigration("Add original informations for reactions", addReactionOriginals), + NewMigration("Add original information for reactions", addReactionOriginals), // v124 -> v125 NewMigration("Add columns to user and repository", addUserRepoMissingColumns), // v125 -> v126 @@ -309,6 +309,24 @@ var migrations = []Migration{ NewMigration("Add LFS columns to Mirror", addLFSMirrorColumns), // v179 -> v180 NewMigration("Convert avatar url to text", convertAvatarURLToText), + // v180 -> v181 + NewMigration("Delete credentials from past migrations", deleteMigrationCredentials), + // v181 -> v182 + NewMigration("Always save primary email on email address table", addPrimaryEmail2EmailAddress), + // v182 -> v183 + NewMigration("Add issue resource index table", addIssueResourceIndexTable), + // v183 -> v184 + NewMigration("Create PushMirror table", createPushMirrorTable), + // v184 -> v185 + NewMigration("Rename Task errors to message", renameTaskErrorsToMessage), + // v185 -> v186 + NewMigration("Add new table repo_archiver", addRepoArchiver), + // v186 -> v187 + NewMigration("Create protected tag table", createProtectedTagTable), + // v187 -> v188 + NewMigration("Drop unneeded webhook related columns", dropWebhookColumns), + // v188 -> v189 + NewMigration("Add key is verified to gpg key", addKeyIsVerified), } // GetCurrentDBVersion returns the current db version diff --git a/models/migrations/migrations_test.go b/models/migrations/migrations_test.go index 641d972b8b37..26066580d897 100644 --- a/models/migrations/migrations_test.go +++ b/models/migrations/migrations_test.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" + 
"code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -55,7 +56,7 @@ func TestMain(m *testing.M) { setting.SetCustomPathAndConf("", "", "") setting.NewContext() - setting.CheckLFSVersion() + git.CheckLFSVersion() setting.InitDBConfig() setting.NewLogServices(true) diff --git a/models/migrations/v111.go b/models/migrations/v111.go index b292f0c7d38a..95f0ec22dd38 100644 --- a/models/migrations/v111.go +++ b/models/migrations/v111.go @@ -141,8 +141,8 @@ func addBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error { return perm, err } - // Prevent strangers from checking out public repo of private orginization - // Allow user if they are collaborator of a repo within a private orginization but not a member of the orginization itself + // Prevent strangers from checking out public repo of private organization + // Allow user if they are collaborator of a repo within a private organization but not a member of the organization itself hasOrgVisible := true // Not SignedUser if user == nil { diff --git a/models/migrations/v147.go b/models/migrations/v147.go index 7488b370aace..ad4ec4ef7ffd 100644 --- a/models/migrations/v147.go +++ b/models/migrations/v147.go @@ -75,7 +75,7 @@ func createReviewsForCodeComments(x *xorm.Engine) error { RefRepoID int64 `xorm:"index"` // Repo where the referencing RefIssueID int64 `xorm:"index"` RefCommentID int64 `xorm:"index"` // 0 if origin is Issue title or content (or PR's) - RefAction int `xorm:"SMALLINT"` // What hapens if RefIssueID resolves + RefAction int `xorm:"SMALLINT"` // What happens if RefIssueID resolves RefIsPull bool } diff --git a/models/migrations/v156.go b/models/migrations/v156.go index 1e7cf282774b..7158d7bb6b71 100644 --- a/models/migrations/v156.go +++ b/models/migrations/v156.go @@ -88,7 +88,7 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error { repo = new(Repository) has, err := sess.ID(release.RepoID).Get(repo) if err != nil { - log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s", release.RepoID, release.ID, release.TagName) + log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s. Error: %v", release.RepoID, release.ID, release.TagName, err) return err } else if !has { log.Warn("Release[%d] is orphaned and refers to non-existing repository %d", release.ID, release.RepoID) @@ -105,13 +105,13 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error { } if _, err := sess.ID(release.RepoID).Get(repo); err != nil { - log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s", release.RepoID, release.ID, release.TagName) + log.Error("Error whilst loading repository[%d] for release[%d] with tag name %s. Error: %v", release.RepoID, release.ID, release.TagName, err) return err } } gitRepo, err = git.OpenRepository(repoPath(repo.OwnerName, repo.Name)) if err != nil { - log.Error("Error whilst opening git repo for %-v", repo) + log.Error("Error whilst opening git repo for [%d]%s/%s. Error: %v", repo.ID, repo.OwnerName, repo.Name, err) return err } } @@ -119,18 +119,36 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error { commit, err := gitRepo.GetTagCommit(release.TagName) if err != nil { if git.IsErrNotExist(err) { - log.Warn("Unable to find commit %s for Tag: %s in %-v. Cannot update publisher ID.", err.(git.ErrNotExist).ID, release.TagName, repo) + log.Warn("Unable to find commit %s for Tag: %s in [%d]%s/%s. 
Cannot update publisher ID.", err.(git.ErrNotExist).ID, release.TagName, repo.ID, repo.OwnerName, repo.Name) continue } - log.Error("Error whilst getting commit for Tag: %s in %-v.", release.TagName, repo) + log.Error("Error whilst getting commit for Tag: %s in [%d]%s/%s. Error: %v", release.TagName, repo.ID, repo.OwnerName, repo.Name, err) return fmt.Errorf("GetTagCommit: %v", err) } + if commit.Author.Email == "" { + log.Warn("Tag: %s in Repo[%d]%s/%s does not have a tagger.", release.TagName, repo.ID, repo.OwnerName, repo.Name) + commit, err = gitRepo.GetCommit(commit.ID.String()) + if err != nil { + if git.IsErrNotExist(err) { + log.Warn("Unable to find commit %s for Tag: %s in [%d]%s/%s. Cannot update publisher ID.", err.(git.ErrNotExist).ID, release.TagName, repo.ID, repo.OwnerName, repo.Name) + continue + } + log.Error("Error whilst getting commit for Tag: %s in [%d]%s/%s. Error: %v", release.TagName, repo.ID, repo.OwnerName, repo.Name, err) + return fmt.Errorf("GetCommit: %v", err) + } + } + + if commit.Author.Email == "" { + log.Warn("Tag: %s in Repo[%d]%s/%s does not have a Tagger and its underlying commit does not have an Author either!", release.TagName, repo.ID, repo.OwnerName, repo.Name) + continue + } + if user == nil || !strings.EqualFold(user.Email, commit.Author.Email) { user = new(User) _, err = sess.Where("email=?", commit.Author.Email).Get(user) if err != nil { - log.Error("Error whilst getting commit author by email: %s for Tag: %s in %-v.", commit.Author.Email, release.TagName, repo) + log.Error("Error whilst getting commit author by email: %s for Tag: %s in [%d]%s/%s. Error: %v", commit.Author.Email, release.TagName, repo.ID, repo.OwnerName, repo.Name, err) return err } @@ -143,7 +161,7 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error { release.PublisherID = user.ID if _, err := sess.ID(release.ID).Cols("publisher_id").Update(release); err != nil { - log.Error("Error whilst updating publisher[%d] for release[%d] with tag name %s", release.PublisherID, release.ID, release.TagName) + log.Error("Error whilst updating publisher[%d] for release[%d] with tag name %s. Error: %v", release.PublisherID, release.ID, release.TagName, err) return err } } diff --git a/models/migrations/v180.go b/models/migrations/v180.go new file mode 100644 index 000000000000..8a7637debaf1 --- /dev/null +++ b/models/migrations/v180.go @@ -0,0 +1,123 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "code.gitea.io/gitea/modules/util" + + jsoniter "github.com/json-iterator/go" + "xorm.io/builder" + "xorm.io/xorm" +) + +func deleteMigrationCredentials(x *xorm.Engine) (err error) { + // Task represents a task + type Task struct { + ID int64 + DoerID int64 `xorm:"index"` // operator + OwnerID int64 `xorm:"index"` // repo owner id, when creating, the repoID maybe zero + RepoID int64 `xorm:"index"` + Type int + Status int `xorm:"index"` + StartTime int64 + EndTime int64 + PayloadContent string `xorm:"TEXT"` + Errors string `xorm:"TEXT"` // if task failed, saved the error reason + Created int64 `xorm:"created"` + } + + const TaskTypeMigrateRepo = 0 + const TaskStatusStopped = 2 + + const batchSize = 100 + + // only match migration tasks, that are not pending or running + cond := builder.Eq{ + "type": TaskTypeMigrateRepo, + }.And(builder.Gte{ + "status": TaskStatusStopped, + }) + + sess := x.NewSession() + defer sess.Close() + + for start := 0; ; start += batchSize { + tasks := make([]*Task, 0, batchSize) + if err = sess.Limit(batchSize, start).Where(cond, 0).Find(&tasks); err != nil { + return + } + if len(tasks) == 0 { + break + } + if err = sess.Begin(); err != nil { + return + } + for _, t := range tasks { + if t.PayloadContent, err = removeCredentials(t.PayloadContent); err != nil { + return + } + if _, err = sess.ID(t.ID).Cols("payload_content").Update(t); err != nil { + return + } + } + if err = sess.Commit(); err != nil { + return + } + } + return +} + +func removeCredentials(payload string) (string, error) { + // MigrateOptions defines the way a repository gets migrated + // this is for internal usage by migrations module and func who interact with it + type MigrateOptions struct { + // required: true + CloneAddr string `json:"clone_addr" binding:"Required"` + CloneAddrEncrypted string `json:"clone_addr_encrypted,omitempty"` + AuthUsername string `json:"auth_username"` + AuthPassword string `json:"-"` + AuthPasswordEncrypted string `json:"auth_password_encrypted,omitempty"` + AuthToken string `json:"-"` + AuthTokenEncrypted string `json:"auth_token_encrypted,omitempty"` + // required: true + UID int `json:"uid" binding:"Required"` + // required: true + RepoName string `json:"repo_name" binding:"Required"` + Mirror bool `json:"mirror"` + LFS bool `json:"lfs"` + LFSEndpoint string `json:"lfs_endpoint"` + Private bool `json:"private"` + Description string `json:"description"` + OriginalURL string + GitServiceType int + Wiki bool + Issues bool + Milestones bool + Labels bool + Releases bool + Comments bool + PullRequests bool + ReleaseAssets bool + MigrateToRepoID int64 + MirrorInterval string `json:"mirror_interval"` + } + + var opts MigrateOptions + json := jsoniter.ConfigCompatibleWithStandardLibrary + err := json.Unmarshal([]byte(payload), &opts) + if err != nil { + return "", err + } + + opts.AuthPassword = "" + opts.AuthToken = "" + opts.CloneAddr = util.NewStringURLSanitizer(opts.CloneAddr, true).Replace(opts.CloneAddr) + + confBytes, err := json.Marshal(opts) + if err != nil { + return "", err + } + return string(confBytes), nil +} diff --git a/models/migrations/v181.go b/models/migrations/v181.go new file mode 100644 index 000000000000..65045593ad6f --- /dev/null +++ b/models/migrations/v181.go @@ -0,0 +1,93 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "strings" + + "xorm.io/xorm" +) + +func addPrimaryEmail2EmailAddress(x *xorm.Engine) (err error) { + type User struct { + ID int64 `xorm:"pk autoincr"` + Email string `xorm:"NOT NULL"` + IsActive bool `xorm:"INDEX"` // Activate primary email + } + + type EmailAddress1 struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"INDEX NOT NULL"` + Email string `xorm:"UNIQUE NOT NULL"` + LowerEmail string + IsActivated bool + IsPrimary bool `xorm:"DEFAULT(false) NOT NULL"` + } + + // Add lower_email and is_primary columns + if err = x.Table("email_address").Sync2(new(EmailAddress1)); err != nil { + return + } + + if _, err = x.Exec("UPDATE email_address SET lower_email=LOWER(email), is_primary=?", false); err != nil { + return + } + + type EmailAddress struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"INDEX NOT NULL"` + Email string `xorm:"UNIQUE NOT NULL"` + LowerEmail string `xorm:"UNIQUE NOT NULL"` + IsActivated bool + IsPrimary bool `xorm:"DEFAULT(false) NOT NULL"` + } + + // change lower_email as unique + if err = x.Sync2(new(EmailAddress)); err != nil { + return + } + + sess := x.NewSession() + defer sess.Close() + + const batchSize = 100 + + for start := 0; ; start += batchSize { + users := make([]*User, 0, batchSize) + if err = sess.Limit(batchSize, start).Find(&users); err != nil { + return + } + if len(users) == 0 { + break + } + + for _, user := range users { + var exist bool + exist, err = sess.Where("email=?", user.Email).Table("email_address").Exist() + if err != nil { + return + } + if !exist { + if _, err = sess.Insert(&EmailAddress{ + UID: user.ID, + Email: user.Email, + LowerEmail: strings.ToLower(user.Email), + IsActivated: user.IsActive, + IsPrimary: true, + }); err != nil { + return + } + } else { + if _, err = sess.Where("email=?", user.Email).Cols("is_primary").Update(&EmailAddress{ + IsPrimary: true, + }); err != nil { + return + } + } + } + } + + return nil +} diff --git a/models/migrations/v181_test.go b/models/migrations/v181_test.go new file mode 100644 index 000000000000..b392a9b71d7b --- /dev/null +++ b/models/migrations/v181_test.go @@ -0,0 +1,54 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_addPrimaryEmail2EmailAddress(t *testing.T) { + type User struct { + ID int64 + Email string + IsActive bool + } + + // Prepare and load the testing database + x, deferable := prepareTestEnv(t, 0, new(User)) + if x == nil || t.Failed() { + defer deferable() + return + } + defer deferable() + + err := addPrimaryEmail2EmailAddress(x) + assert.NoError(t, err) + + type EmailAddress struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"INDEX NOT NULL"` + Email string `xorm:"UNIQUE NOT NULL"` + LowerEmail string `xorm:"UNIQUE NOT NULL"` + IsActivated bool + IsPrimary bool `xorm:"DEFAULT(false) NOT NULL"` + } + + var users = make([]User, 0, 20) + err = x.Find(&users) + assert.NoError(t, err) + + for _, user := range users { + var emailAddress EmailAddress + has, err := x.Where("lower_email=?", strings.ToLower(user.Email)).Get(&emailAddress) + assert.NoError(t, err) + assert.True(t, has) + assert.True(t, emailAddress.IsPrimary) + assert.EqualValues(t, user.IsActive, emailAddress.IsActivated) + assert.EqualValues(t, user.ID, emailAddress.UID) + } +} diff --git a/models/migrations/v182.go b/models/migrations/v182.go new file mode 100644 index 000000000000..dd9a04f27e72 --- /dev/null +++ b/models/migrations/v182.go @@ -0,0 +1,42 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "xorm.io/xorm" +) + +func addIssueResourceIndexTable(x *xorm.Engine) error { + type ResourceIndex struct { + GroupID int64 `xorm:"index unique(s)"` + MaxIndex int64 `xorm:"index unique(s)"` + } + + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { + return err + } + + if err := sess.Table("issue_index").Sync2(new(ResourceIndex)); err != nil { + return err + } + + // Remove data we're goint to rebuild + if _, err := sess.Table("issue_index").Where("1=1").Delete(&ResourceIndex{}); err != nil { + return err + } + + // Create current data for all repositories with issues and PRs + if _, err := sess.Exec("INSERT INTO issue_index (group_id, max_index) " + + "SELECT max_data.repo_id, max_data.max_index " + + "FROM ( SELECT issue.repo_id AS repo_id, max(issue.`index`) AS max_index " + + "FROM issue GROUP BY issue.repo_id) AS max_data"); err != nil { + return err + } + + return sess.Commit() +} diff --git a/models/migrations/v182_test.go b/models/migrations/v182_test.go new file mode 100644 index 000000000000..6f418f779461 --- /dev/null +++ b/models/migrations/v182_test.go @@ -0,0 +1,59 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_addIssueResourceIndexTable(t *testing.T) { + // Create the models used in the migration + type Issue struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"UNIQUE(s)"` + Index int64 `xorm:"UNIQUE(s)"` + } + + // Prepare and load the testing database + x, deferable := prepareTestEnv(t, 0, new(Issue)) + if x == nil || t.Failed() { + defer deferable() + return + } + defer deferable() + + // Run the migration + if err := addIssueResourceIndexTable(x); err != nil { + assert.NoError(t, err) + return + } + + type ResourceIndex struct { + GroupID int64 `xorm:"index unique(s)"` + MaxIndex int64 `xorm:"index unique(s)"` + } + + var start = 0 + const batchSize = 1000 + for { + var indexes = make([]ResourceIndex, 0, batchSize) + err := x.Table("issue_index").Limit(batchSize, start).Find(&indexes) + assert.NoError(t, err) + + for _, idx := range indexes { + var maxIndex int + has, err := x.SQL("SELECT max(`index`) FROM issue WHERE repo_id = ?", idx.GroupID).Get(&maxIndex) + assert.NoError(t, err) + assert.True(t, has) + assert.EqualValues(t, maxIndex, idx.MaxIndex) + } + if len(indexes) < batchSize { + break + } + start += len(indexes) + } +} diff --git a/models/migrations/v183.go b/models/migrations/v183.go new file mode 100644 index 000000000000..cc752bf827c1 --- /dev/null +++ b/models/migrations/v183.go @@ -0,0 +1,39 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "fmt" + "time" + + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +func createPushMirrorTable(x *xorm.Engine) error { + type PushMirror struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX"` + RemoteName string + + Interval time.Duration + CreatedUnix timeutil.TimeStamp `xorm:"created"` + LastUpdateUnix timeutil.TimeStamp `xorm:"INDEX last_update"` + LastError string `xorm:"text"` + } + + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return err + } + + if err := sess.Sync2(new(PushMirror)); err != nil { + return fmt.Errorf("Sync2: %v", err) + } + + return sess.Commit() +} diff --git a/models/migrations/v184.go b/models/migrations/v184.go new file mode 100644 index 000000000000..b7be342b871c --- /dev/null +++ b/models/migrations/v184.go @@ -0,0 +1,47 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "fmt" + + "code.gitea.io/gitea/modules/setting" + + "xorm.io/xorm" +) + +func renameTaskErrorsToMessage(x *xorm.Engine) error { + type Task struct { + Errors string `xorm:"TEXT"` // if task failed, saved the error reason + Type int + Status int `xorm:"index"` + } + + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return err + } + + if err := sess.Sync2(new(Task)); err != nil { + return fmt.Errorf("error on Sync2: %v", err) + } + + switch { + case setting.Database.UseMySQL: + if _, err := sess.Exec("ALTER TABLE `task` CHANGE errors message text"); err != nil { + return err + } + case setting.Database.UseMSSQL: + if _, err := sess.Exec("sp_rename 'task.errors', 'message', 'COLUMN'"); err != nil { + return err + } + default: + if _, err := sess.Exec("ALTER TABLE `task` RENAME COLUMN errors TO message"); err != nil { + return err + } + } + return sess.Commit() +} diff --git a/models/migrations/v185.go b/models/migrations/v185.go new file mode 100644 index 000000000000..096994889703 --- /dev/null +++ b/models/migrations/v185.go @@ -0,0 +1,22 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "xorm.io/xorm" +) + +func addRepoArchiver(x *xorm.Engine) error { + // RepoArchiver represents all archivers + type RepoArchiver struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"index unique(s)"` + Type int `xorm:"unique(s)"` + Status int + CommitID string `xorm:"VARCHAR(40) unique(s)"` + CreatedUnix int64 `xorm:"INDEX NOT NULL created"` + } + return x.Sync2(new(RepoArchiver)) +} diff --git a/models/migrations/v186.go b/models/migrations/v186.go new file mode 100644 index 000000000000..eb6ec7118cd7 --- /dev/null +++ b/models/migrations/v186.go @@ -0,0 +1,26 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +func createProtectedTagTable(x *xorm.Engine) error { + type ProtectedTag struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 + NamePattern string + AllowlistUserIDs []int64 `xorm:"JSON TEXT"` + AllowlistTeamIDs []int64 `xorm:"JSON TEXT"` + + CreatedUnix timeutil.TimeStamp `xorm:"created"` + UpdatedUnix timeutil.TimeStamp `xorm:"updated"` + } + + return x.Sync2(new(ProtectedTag)) +} diff --git a/models/migrations/v187.go b/models/migrations/v187.go new file mode 100644 index 000000000000..627423717a30 --- /dev/null +++ b/models/migrations/v187.go @@ -0,0 +1,46 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package migrations + +import ( + "xorm.io/xorm" +) + +func dropWebhookColumns(x *xorm.Engine) error { + // Make sure the columns exist before dropping them + type Webhook struct { + Signature string `xorm:"TEXT"` + IsSSL bool `xorm:"is_ssl"` + } + if err := x.Sync2(new(Webhook)); err != nil { + return err + } + + type HookTask struct { + Typ string `xorm:"VARCHAR(16) index"` + URL string `xorm:"TEXT"` + Signature string `xorm:"TEXT"` + HTTPMethod string `xorm:"http_method"` + ContentType int + IsSSL bool + } + if err := x.Sync2(new(HookTask)); err != nil { + return err + } + + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return err + } + if err := dropTableColumns(sess, "webhook", "signature", "is_ssl"); err != nil { + return err + } + if err := dropTableColumns(sess, "hook_task", "typ", "url", "signature", "http_method", "content_type", "is_ssl"); err != nil { + return err + } + + return sess.Commit() +} diff --git a/models/migrations/v188.go b/models/migrations/v188.go new file mode 100644 index 000000000000..52ef4aaa810b --- /dev/null +++ b/models/migrations/v188.go @@ -0,0 +1,15 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import "xorm.io/xorm" + +func addKeyIsVerified(x *xorm.Engine) error { + type GPGKey struct { + Verified bool `xorm:"NOT NULL DEFAULT false"` + } + + return x.Sync(new(GPGKey)) +} diff --git a/models/migrations/v71.go b/models/migrations/v71.go index 3012dd94f5d9..e4ed46a21a5b 100644 --- a/models/migrations/v71.go +++ b/models/migrations/v71.go @@ -8,8 +8,8 @@ import ( "crypto/sha256" "fmt" - "code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "golang.org/x/crypto/pbkdf2" "xorm.io/xorm" @@ -53,7 +53,7 @@ func addScratchHash(x *xorm.Engine) error { for _, tfa := range tfas { // generate salt - salt, err := generate.GetRandomString(10) + salt, err := util.RandomString(10) if err != nil { return err } diff --git a/models/migrations/v85.go b/models/migrations/v85.go index 8c92f10b6ed9..bdbcebeb00f2 100644 --- a/models/migrations/v85.go +++ b/models/migrations/v85.go @@ -7,9 +7,9 @@ package migrations import ( "fmt" - "code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "xorm.io/xorm" ) @@ -65,7 +65,7 @@ func hashAppToken(x *xorm.Engine) error { for _, token := range tokens { // generate salt - salt, err := generate.GetRandomString(10) + salt, err := util.RandomString(10) if err != nil { return err } diff --git a/models/models.go b/models/models.go index 73e65d828bdf..610933d3270b 100644 --- a/models/models.go +++ b/models/models.go @@ -134,6 +134,10 @@ func init() { new(ProjectIssue), new(Session), new(RepoTransfer), + new(IssueIndex), + new(PushMirror), + new(RepoArchiver), + new(ProtectedTag), ) gonicNames := []string{"SSL", "UID"} @@ -171,6 +175,10 @@ func GetNewEngine() (*xorm.Engine, error) { return engine, nil } +func syncTables() error { + return x.StoreEngine("InnoDB").Sync2(tables...) +} + // NewTestEngine sets a new test xorm.Engine func NewTestEngine() (err error) { x, err = GetNewEngine() @@ -181,7 +189,7 @@ func NewTestEngine() (err error) { x.SetMapper(names.GonicMapper{}) x.SetLogger(NewXORMLogger(!setting.IsProd())) x.ShowSQL(!setting.IsProd()) - return x.StoreEngine("InnoDB").Sync2(tables...) 
+ return syncTables() } // SetEngine sets the xorm.Engine @@ -222,7 +230,7 @@ func NewEngine(ctx context.Context, migrateFunc func(*xorm.Engine) error) (err e return fmt.Errorf("migrate: %v", err) } - if err = x.StoreEngine("InnoDB").Sync2(tables...); err != nil { + if err = syncTables(); err != nil { return fmt.Errorf("sync database struct error: %v", err) } @@ -320,7 +328,7 @@ func DumpDatabase(filePath, dbType string) error { ID int64 `xorm:"pk autoincr"` Version int64 } - t, err := x.TableInfo(Version{}) + t, err := x.TableInfo(&Version{}) if err != nil { return err } diff --git a/models/models_test.go b/models/models_test.go index 2441ad7fb064..9793394e0b6f 100644 --- a/models/models_test.go +++ b/models/models_test.go @@ -25,7 +25,7 @@ func TestDumpDatabase(t *testing.T) { ID int64 `xorm:"pk autoincr"` Version int64 } - assert.NoError(t, x.Sync2(Version{})) + assert.NoError(t, x.Sync2(new(Version))) for _, dbName := range setting.SupportedDatabases { dbType := setting.GetDBTypeByName(dbName) diff --git a/models/notification.go b/models/notification.go index dcb0322079e4..c4c7728ad9f6 100644 --- a/models/notification.go +++ b/models/notification.go @@ -74,6 +74,7 @@ type FindNotificationOptions struct { RepoID int64 IssueID int64 Status []NotificationStatus + Source []NotificationSource UpdatedAfterUnix int64 UpdatedBeforeUnix int64 } @@ -93,6 +94,9 @@ func (opts *FindNotificationOptions) ToCond() builder.Cond { if len(opts.Status) > 0 { cond = cond.And(builder.In("notification.status", opts.Status)) } + if len(opts.Source) > 0 { + cond = cond.And(builder.In("notification.source", opts.Source)) + } if opts.UpdatedAfterUnix != 0 { cond = cond.And(builder.Gte{"notification.updated_unix": opts.UpdatedAfterUnix}) } @@ -111,13 +115,13 @@ func (opts *FindNotificationOptions) ToSession(e Engine) *xorm.Session { return sess } -func getNotifications(e Engine, options FindNotificationOptions) (nl NotificationList, err error) { +func getNotifications(e Engine, options *FindNotificationOptions) (nl NotificationList, err error) { err = options.ToSession(e).OrderBy("notification.updated_unix DESC").Find(&nl) return } // GetNotifications returns all notifications that fit to the given options. 
-func GetNotifications(opts FindNotificationOptions) (NotificationList, error) { +func GetNotifications(opts *FindNotificationOptions) (NotificationList, error) { return getNotifications(x, opts) } @@ -203,13 +207,14 @@ func createOrUpdateIssueNotifications(e Engine, issueID, commentID, notification for _, id := range issueWatches { toNotify[id] = struct{}{} } - - repoWatches, err := getRepoWatchersIDs(e, issue.RepoID) - if err != nil { - return err - } - for _, id := range repoWatches { - toNotify[id] = struct{}{} + if !(issue.IsPull && HasWorkInProgressPrefix(issue.Title)) { + repoWatches, err := getRepoWatchersIDs(e, issue.RepoID) + if err != nil { + return err + } + for _, id := range repoWatches { + toNotify[id] = struct{}{} + } } issueParticipants, err := issue.getParticipantIDsByIssue(e) if err != nil { diff --git a/models/oauth2.go b/models/oauth2.go index cc9de74f84eb..46da60e02dd6 100644 --- a/models/oauth2.go +++ b/models/oauth2.go @@ -132,6 +132,9 @@ func GetActiveOAuth2Providers() ([]string, map[string]OAuth2Provider, error) { // InitOAuth2 initialize the OAuth2 lib and register all active OAuth2 providers in the library func InitOAuth2() error { + if err := oauth2.InitSigningKey(); err != nil { + return err + } if err := oauth2.Init(x); err != nil { return err } diff --git a/models/oauth2_application.go b/models/oauth2_application.go index 679fdb18f957..5a924763be16 100644 --- a/models/oauth2_application.go +++ b/models/oauth2_application.go @@ -12,8 +12,8 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/auth/oauth2" "code.gitea.io/gitea/modules/secret" - "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -210,7 +210,7 @@ func UpdateOAuth2Application(opts UpdateOAuth2ApplicationOptions) (*OAuth2Applic return nil, err } if app.UID != opts.UserID { - return nil, fmt.Errorf("UID missmatch") + return nil, fmt.Errorf("UID mismatch") } app.Name = opts.Name @@ -376,7 +376,7 @@ func getOAuth2AuthorizationByCode(e Engine, code string) (auth *OAuth2Authorizat ////////////////////////////////////////////////////// -// OAuth2Grant represents the permission of an user for a specifc application to access resources +// OAuth2Grant represents the permission of an user for a specific application to access resources type OAuth2Grant struct { ID int64 `xorm:"pk autoincr"` UserID int64 `xorm:"INDEX unique(user_application)"` @@ -394,7 +394,7 @@ func (grant *OAuth2Grant) TableName() string { return "oauth2_grant" } -// GenerateNewAuthorizationCode generates a new authorization code for a grant and saves it to the databse +// GenerateNewAuthorizationCode generates a new authorization code for a grant and saves it to the database func (grant *OAuth2Grant) GenerateNewAuthorizationCode(redirectURI, codeChallenge, codeChallengeMethod string) (*OAuth2AuthorizationCode, error) { return grant.generateNewAuthorizationCode(x, redirectURI, codeChallenge, codeChallengeMethod) } @@ -537,13 +537,13 @@ type OAuth2Token struct { jwt.StandardClaims } -// ParseOAuth2Token parses a singed jwt string +// ParseOAuth2Token parses a signed jwt string func ParseOAuth2Token(jwtToken string) (*OAuth2Token, error) { parsedToken, err := jwt.ParseWithClaims(jwtToken, &OAuth2Token{}, func(token *jwt.Token) (interface{}, error) { - if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + if token.Method == nil || token.Method.Alg() != oauth2.DefaultSigningKey.SigningMethod().Alg() { return nil, fmt.Errorf("unexpected signing algo: %v", 
token.Header["alg"]) } - return setting.OAuth2.JWTSecretBytes, nil + return oauth2.DefaultSigningKey.VerifyKey(), nil }) if err != nil { return nil, err @@ -559,19 +559,34 @@ func ParseOAuth2Token(jwtToken string) (*OAuth2Token, error) { // SignToken signs the token with the JWT secret func (token *OAuth2Token) SignToken() (string, error) { token.IssuedAt = time.Now().Unix() - jwtToken := jwt.NewWithClaims(jwt.SigningMethodHS512, token) - return jwtToken.SignedString(setting.OAuth2.JWTSecretBytes) + jwtToken := jwt.NewWithClaims(oauth2.DefaultSigningKey.SigningMethod(), token) + oauth2.DefaultSigningKey.PreProcessToken(jwtToken) + return jwtToken.SignedString(oauth2.DefaultSigningKey.SignKey()) } // OIDCToken represents an OpenID Connect id_token type OIDCToken struct { jwt.StandardClaims Nonce string `json:"nonce,omitempty"` + + // Scope profile + Name string `json:"name,omitempty"` + PreferredUsername string `json:"preferred_username,omitempty"` + Profile string `json:"profile,omitempty"` + Picture string `json:"picture,omitempty"` + Website string `json:"website,omitempty"` + Locale string `json:"locale,omitempty"` + UpdatedAt timeutil.TimeStamp `json:"updated_at,omitempty"` + + // Scope email + Email string `json:"email,omitempty"` + EmailVerified bool `json:"email_verified,omitempty"` } // SignToken signs an id_token with the (symmetric) client secret key -func (token *OIDCToken) SignToken(clientSecret string) (string, error) { +func (token *OIDCToken) SignToken(signingKey oauth2.JWTSigningKey) (string, error) { token.IssuedAt = time.Now().Unix() - jwtToken := jwt.NewWithClaims(jwt.SigningMethodHS256, token) - return jwtToken.SignedString([]byte(clientSecret)) + jwtToken := jwt.NewWithClaims(signingKey.SigningMethod(), token) + signingKey.PreProcessToken(jwtToken) + return jwtToken.SignedString(signingKey.SignKey()) } diff --git a/models/oauth2_application_test.go b/models/oauth2_application_test.go index 511d0194658a..7a4bce85c005 100644 --- a/models/oauth2_application_test.go +++ b/models/oauth2_application_test.go @@ -219,11 +219,11 @@ func TestOAuth2AuthorizationCode_GenerateRedirectURI(t *testing.T) { redirect, err := code.GenerateRedirectURI("thestate") assert.NoError(t, err) - assert.Equal(t, redirect.String(), "https://example.com/callback?code=thecode&state=thestate") + assert.Equal(t, "https://example.com/callback?code=thecode&state=thestate", redirect.String()) redirect, err = code.GenerateRedirectURI("") assert.NoError(t, err) - assert.Equal(t, redirect.String(), "https://example.com/callback?code=thecode") + assert.Equal(t, "https://example.com/callback?code=thecode", redirect.String()) } func TestOAuth2AuthorizationCode_Invalidate(t *testing.T) { diff --git a/models/org.go b/models/org.go index 3474988efc9c..58fb26b1bb51 100644 --- a/models/org.go +++ b/models/org.go @@ -79,7 +79,7 @@ func (org *User) GetMembers() (err error) { return } -// FindOrgMembersOpts represensts find org members condtions +// FindOrgMembersOpts represensts find org members conditions type FindOrgMembersOpts struct { ListOptions OrgID int64 @@ -425,6 +425,25 @@ func GetOrgsByUserID(userID int64, showAll bool) ([]*User, error) { return getOrgsByUserID(sess, userID, showAll) } +// queryUserOrgIDs returns a condition to return user's organization id +func queryUserOrgIDs(uid int64) *builder.Builder { + return builder.Select("team.org_id"). + From("team_user").InnerJoin("team", "team.id = team_user.team_id"). 
+ Where(builder.Eq{"team_user.uid": uid}) +} + +// MinimalOrg represents a simple orgnization with only needed columns +type MinimalOrg = User + +// GetUserOrgsList returns one user's all orgs list +func GetUserOrgsList(uid int64) ([]*MinimalOrg, error) { + var orgs = make([]*MinimalOrg, 0, 20) + return orgs, x.Select("id, name, full_name, visibility, avatar, avatar_email, use_custom_avatar"). + Table("user"). + In("id", queryUserOrgIDs(uid)). + Find(&orgs) +} + func getOwnedOrgsByUserID(sess *xorm.Session, userID int64) ([]*User, error) { orgs := make([]*User, 0, 10) return orgs, sess. @@ -436,22 +455,22 @@ func getOwnedOrgsByUserID(sess *xorm.Session, userID int64) ([]*User, error) { Find(&orgs) } -// HasOrgVisible tells if the given user can see the given org -func HasOrgVisible(org, user *User) bool { - return hasOrgVisible(x, org, user) +// HasOrgOrUserVisible tells if the given user can see the given org or user +func HasOrgOrUserVisible(org, user *User) bool { + return hasOrgOrUserVisible(x, org, user) } -func hasOrgVisible(e Engine, org, user *User) bool { +func hasOrgOrUserVisible(e Engine, orgOrUser, user *User) bool { // Not SignedUser if user == nil { - return org.Visibility == structs.VisibleTypePublic + return orgOrUser.Visibility == structs.VisibleTypePublic } - if user.IsAdmin { + if user.IsAdmin || orgOrUser.ID == user.ID { return true } - if (org.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !org.hasMemberWithUserID(e, user.ID) { + if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !orgOrUser.hasMemberWithUserID(e, user.ID) { return false } return true @@ -464,7 +483,7 @@ func HasOrgsVisible(orgs []*User, user *User) bool { } for _, org := range orgs { - if HasOrgVisible(org, user) { + if HasOrgOrUserVisible(org, user) { return true } } diff --git a/models/org_test.go b/models/org_test.go index 66979714c13e..e494e502dd31 100644 --- a/models/org_test.go +++ b/models/org_test.go @@ -453,7 +453,7 @@ func TestAddOrgUser(t *testing.T) { assert.NoError(t, AddOrgUser(orgID, userID)) ou := &OrgUser{OrgID: orgID, UID: userID} AssertExistsAndLoadBean(t, ou) - assert.Equal(t, ou.IsPublic, isPublic) + assert.Equal(t, isPublic, ou.IsPublic) org = AssertExistsAndLoadBean(t, &User{ID: orgID}).(*User) assert.EqualValues(t, expectedNumMembers, org.NumMembers) } @@ -586,12 +586,12 @@ func TestHasOrgVisibleTypePublic(t *testing.T) { assert.NoError(t, CreateOrganization(org, owner)) org = AssertExistsAndLoadBean(t, &User{Name: org.Name, Type: UserTypeOrganization}).(*User) - test1 := HasOrgVisible(org, owner) - test2 := HasOrgVisible(org, user3) - test3 := HasOrgVisible(org, nil) - assert.Equal(t, test1, true) // owner of org - assert.Equal(t, test2, true) // user not a part of org - assert.Equal(t, test3, true) // logged out user + test1 := HasOrgOrUserVisible(org, owner) + test2 := HasOrgOrUserVisible(org, user3) + test3 := HasOrgOrUserVisible(org, nil) + assert.True(t, test1) // owner of org + assert.True(t, test2) // user not a part of org + assert.True(t, test3) // logged out user } func TestHasOrgVisibleTypeLimited(t *testing.T) { @@ -609,12 +609,12 @@ func TestHasOrgVisibleTypeLimited(t *testing.T) { assert.NoError(t, CreateOrganization(org, owner)) org = AssertExistsAndLoadBean(t, &User{Name: org.Name, Type: UserTypeOrganization}).(*User) - test1 := HasOrgVisible(org, owner) - test2 := HasOrgVisible(org, user3) - test3 := HasOrgVisible(org, nil) - assert.Equal(t, test1, true) // owner of org - assert.Equal(t, test2, true) // user not a 
part of org - assert.Equal(t, test3, false) // logged out user + test1 := HasOrgOrUserVisible(org, owner) + test2 := HasOrgOrUserVisible(org, user3) + test3 := HasOrgOrUserVisible(org, nil) + assert.True(t, test1) // owner of org + assert.True(t, test2) // user not a part of org + assert.False(t, test3) // logged out user } func TestHasOrgVisibleTypePrivate(t *testing.T) { @@ -632,12 +632,12 @@ func TestHasOrgVisibleTypePrivate(t *testing.T) { assert.NoError(t, CreateOrganization(org, owner)) org = AssertExistsAndLoadBean(t, &User{Name: org.Name, Type: UserTypeOrganization}).(*User) - test1 := HasOrgVisible(org, owner) - test2 := HasOrgVisible(org, user3) - test3 := HasOrgVisible(org, nil) - assert.Equal(t, test1, true) // owner of org - assert.Equal(t, test2, false) // user not a part of org - assert.Equal(t, test3, false) // logged out user + test1 := HasOrgOrUserVisible(org, owner) + test2 := HasOrgOrUserVisible(org, user3) + test3 := HasOrgOrUserVisible(org, nil) + assert.True(t, test1) // owner of org + assert.False(t, test2) // user not a part of org + assert.False(t, test3) // logged out user } func TestGetUsersWhoCanCreateOrgRepo(t *testing.T) { diff --git a/models/protected_tag.go b/models/protected_tag.go new file mode 100644 index 000000000000..88f20dd29a86 --- /dev/null +++ b/models/protected_tag.go @@ -0,0 +1,131 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "regexp" + "strings" + + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/gobwas/glob" +) + +// ProtectedTag struct +type ProtectedTag struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 + NamePattern string + RegexPattern *regexp.Regexp `xorm:"-"` + GlobPattern glob.Glob `xorm:"-"` + AllowlistUserIDs []int64 `xorm:"JSON TEXT"` + AllowlistTeamIDs []int64 `xorm:"JSON TEXT"` + + CreatedUnix timeutil.TimeStamp `xorm:"created"` + UpdatedUnix timeutil.TimeStamp `xorm:"updated"` +} + +// InsertProtectedTag inserts a protected tag to database +func InsertProtectedTag(pt *ProtectedTag) error { + _, err := x.Insert(pt) + return err +} + +// UpdateProtectedTag updates the protected tag +func UpdateProtectedTag(pt *ProtectedTag) error { + _, err := x.ID(pt.ID).AllCols().Update(pt) + return err +} + +// DeleteProtectedTag deletes a protected tag by ID +func DeleteProtectedTag(pt *ProtectedTag) error { + _, err := x.ID(pt.ID).Delete(&ProtectedTag{}) + return err +} + +// EnsureCompiledPattern ensures the glob pattern is compiled +func (pt *ProtectedTag) EnsureCompiledPattern() error { + if pt.RegexPattern != nil || pt.GlobPattern != nil { + return nil + } + + var err error + if len(pt.NamePattern) >= 2 && strings.HasPrefix(pt.NamePattern, "/") && strings.HasSuffix(pt.NamePattern, "/") { + pt.RegexPattern, err = regexp.Compile(pt.NamePattern[1 : len(pt.NamePattern)-1]) + } else { + pt.GlobPattern, err = glob.Compile(pt.NamePattern) + } + return err +} + +// IsUserAllowed returns true if the user is allowed to modify the tag +func (pt *ProtectedTag) IsUserAllowed(userID int64) (bool, error) { + if base.Int64sContains(pt.AllowlistUserIDs, userID) { + return true, nil + } + + if len(pt.AllowlistTeamIDs) == 0 { + return false, nil + } + + in, err := IsUserInTeams(userID, pt.AllowlistTeamIDs) + if err != nil { + return false, err + } + return in, nil +} + +// GetProtectedTags gets all protected tags of the repository +func (repo *Repository) 
GetProtectedTags() ([]*ProtectedTag, error) { + tags := make([]*ProtectedTag, 0) + return tags, x.Find(&tags, &ProtectedTag{RepoID: repo.ID}) +} + +// GetProtectedTagByID gets the protected tag with the specific id +func GetProtectedTagByID(id int64) (*ProtectedTag, error) { + tag := new(ProtectedTag) + has, err := x.ID(id).Get(tag) + if err != nil { + return nil, err + } + if !has { + return nil, nil + } + return tag, nil +} + +// IsUserAllowedToControlTag checks if a user can control the specific tag. +// It returns true if the tag name is not protected or the user is allowed to control it. +func IsUserAllowedToControlTag(tags []*ProtectedTag, tagName string, userID int64) (bool, error) { + isAllowed := true + for _, tag := range tags { + err := tag.EnsureCompiledPattern() + if err != nil { + return false, err + } + + if !tag.matchString(tagName) { + continue + } + + isAllowed, err = tag.IsUserAllowed(userID) + if err != nil { + return false, err + } + if isAllowed { + break + } + } + + return isAllowed, nil +} + +func (pt *ProtectedTag) matchString(name string) bool { + if pt.RegexPattern != nil { + return pt.RegexPattern.MatchString(name) + } + return pt.GlobPattern.Match(name) +} diff --git a/models/protected_tag_test.go b/models/protected_tag_test.go new file mode 100644 index 000000000000..3dc895c69fe1 --- /dev/null +++ b/models/protected_tag_test.go @@ -0,0 +1,162 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsUserAllowed(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + pt := &ProtectedTag{} + allowed, err := pt.IsUserAllowed(1) + assert.NoError(t, err) + assert.False(t, allowed) + + pt = &ProtectedTag{ + AllowlistUserIDs: []int64{1}, + } + allowed, err = pt.IsUserAllowed(1) + assert.NoError(t, err) + assert.True(t, allowed) + + allowed, err = pt.IsUserAllowed(2) + assert.NoError(t, err) + assert.False(t, allowed) + + pt = &ProtectedTag{ + AllowlistTeamIDs: []int64{1}, + } + allowed, err = pt.IsUserAllowed(1) + assert.NoError(t, err) + assert.False(t, allowed) + + allowed, err = pt.IsUserAllowed(2) + assert.NoError(t, err) + assert.True(t, allowed) + + pt = &ProtectedTag{ + AllowlistUserIDs: []int64{1}, + AllowlistTeamIDs: []int64{1}, + } + allowed, err = pt.IsUserAllowed(1) + assert.NoError(t, err) + assert.True(t, allowed) + + allowed, err = pt.IsUserAllowed(2) + assert.NoError(t, err) + assert.True(t, allowed) +} + +func TestIsUserAllowedToControlTag(t *testing.T) { + cases := []struct { + name string + userid int64 + allowed bool + }{ + { + name: "test", + userid: 1, + allowed: true, + }, + { + name: "test", + userid: 3, + allowed: true, + }, + { + name: "gitea", + userid: 1, + allowed: true, + }, + { + name: "gitea", + userid: 3, + allowed: false, + }, + { + name: "test-gitea", + userid: 1, + allowed: true, + }, + { + name: "test-gitea", + userid: 3, + allowed: false, + }, + { + name: "gitea-test", + userid: 1, + allowed: true, + }, + { + name: "gitea-test", + userid: 3, + allowed: true, + }, + { + name: "v-1", + userid: 1, + allowed: false, + }, + { + name: "v-1", + userid: 2, + allowed: true, + }, + { + name: "release", + userid: 1, + allowed: false, + }, + } + + t.Run("Glob", func(t *testing.T) { + protectedTags := []*ProtectedTag{ + { + NamePattern: `*gitea`, + AllowlistUserIDs: []int64{1}, + }, + { + NamePattern: `v-*`, + AllowlistUserIDs: 
[]int64{2}, + }, + { + NamePattern: "release", + }, + } + + for n, c := range cases { + isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid) + assert.NoError(t, err) + assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n) + } + }) + + t.Run("Regex", func(t *testing.T) { + protectedTags := []*ProtectedTag{ + { + NamePattern: `/gitea\z/`, + AllowlistUserIDs: []int64{1}, + }, + { + NamePattern: `/\Av-/`, + AllowlistUserIDs: []int64{2}, + }, + { + NamePattern: "/release/", + }, + } + + for n, c := range cases { + isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid) + assert.NoError(t, err) + assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n) + } + }) +} diff --git a/models/pull.go b/models/pull.go index 133f196aaed4..3717878f4201 100644 --- a/models/pull.go +++ b/models/pull.go @@ -212,12 +212,21 @@ func (pr *PullRequest) GetDefaultMergeMessage() string { log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err) return "" } + if err := pr.LoadBaseRepo(); err != nil { + log.Error("LoadBaseRepo: %v", err) + return "" + } + + issueReference := "#" + if pr.BaseRepo.UnitEnabled(UnitTypeExternalTracker) { + issueReference = "!" + } if pr.BaseRepoID == pr.HeadRepoID { - return fmt.Sprintf("Merge pull request '%s' (#%d) from %s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch) + return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch) } - return fmt.Sprintf("Merge pull request '%s' (#%d) from %s:%s into %s", pr.Issue.Title, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch) + return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch) } // ReviewCount represents a count of Reviews @@ -418,34 +427,23 @@ func (pr *PullRequest) SetMerged() (bool, error) { } // NewPullRequest creates new pull request with labels for repository. -func NewPullRequest(repo *Repository, pull *Issue, labelIDs []int64, uuids []string, pr *PullRequest) (err error) { - // Retry several times in case INSERT fails due to duplicate key for (repo_id, index); see #7887 - i := 0 - for { - if err = newPullRequestAttempt(repo, pull, labelIDs, uuids, pr); err == nil { - return nil - } - if !IsErrNewIssueInsert(err) { - return err - } - if i++; i == issueMaxDupIndexAttempts { - break - } - log.Error("NewPullRequest: error attempting to insert the new issue; will retry. Original error: %v", err) +func NewPullRequest(repo *Repository, issue *Issue, labelIDs []int64, uuids []string, pr *PullRequest) (err error) { + idx, err := GetNextResourceIndex("issue_index", repo.ID) + if err != nil { + return fmt.Errorf("generate issue index failed: %v", err) } - return fmt.Errorf("NewPullRequest: too many errors attempting to insert the new issue. 
Last error was: %v", err) -} -func newPullRequestAttempt(repo *Repository, pull *Issue, labelIDs []int64, uuids []string, pr *PullRequest) (err error) { + issue.Index = idx + sess := x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { return err } - if err = newIssue(sess, pull.Poster, NewIssueOptions{ + if err = newIssue(sess, issue.Poster, NewIssueOptions{ Repo: repo, - Issue: pull, + Issue: issue, LabelIDs: labelIDs, Attachments: uuids, IsPull: true, @@ -456,10 +454,9 @@ func newPullRequestAttempt(repo *Repository, pull *Issue, labelIDs []int64, uuid return fmt.Errorf("newIssue: %v", err) } - pr.Index = pull.Index + pr.Index = issue.Index pr.BaseRepo = repo - - pr.IssueID = pull.ID + pr.IssueID = issue.ID if _, err = sess.Insert(pr); err != nil { return fmt.Errorf("insert pull repo: %v", err) } @@ -598,9 +595,13 @@ func (pr *PullRequest) IsWorkInProgress() bool { log.Error("LoadIssue: %v", err) return false } + return HasWorkInProgressPrefix(pr.Issue.Title) +} +// HasWorkInProgressPrefix determines if the given PR title has a Work In Progress prefix +func HasWorkInProgressPrefix(title string) bool { for _, prefix := range setting.Repository.PullRequest.WorkInProgressPrefixes { - if strings.HasPrefix(strings.ToUpper(pr.Issue.Title), prefix) { + if strings.HasPrefix(strings.ToUpper(title), prefix) { return true } } diff --git a/models/pull_test.go b/models/pull_test.go index 3cc6abfec7e2..5eaeb60e6728 100644 --- a/models/pull_test.go +++ b/models/pull_test.go @@ -234,3 +234,36 @@ func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) { pr.Issue.Title = "[wip] " + original assert.Equal(t, "[wip]", pr.GetWorkInProgressPrefix()) } + +func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 2}).(*PullRequest) + + assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", pr.GetDefaultMergeMessage()) + + pr.BaseRepoID = 1 + pr.HeadRepoID = 2 + assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage()) +} + +func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + externalTracker := RepoUnit{ + Type: UnitTypeExternalTracker, + Config: &ExternalTrackerConfig{ + ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}", + }, + } + baseRepo := &Repository{Name: "testRepo", ID: 1} + baseRepo.Owner = &User{Name: "testOwner"} + baseRepo.Units = []*RepoUnit{&externalTracker} + + pr := AssertExistsAndLoadBean(t, &PullRequest{ID: 2, BaseRepo: baseRepo}).(*PullRequest) + + assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", pr.GetDefaultMergeMessage()) + + pr.BaseRepoID = 1 + pr.HeadRepoID = 2 + assert.Equal(t, "Merge pull request 'issue3' (!3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage()) +} diff --git a/models/release.go b/models/release.go index 13b8f17218c6..1ce88a8210c9 100644 --- a/models/release.go +++ b/models/release.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "xorm.io/builder" ) @@ -173,6 +174,8 @@ type FindReleasesOptions struct { ListOptions IncludeDrafts bool IncludeTags bool + IsPreRelease util.OptionalBool + IsDraft util.OptionalBool TagNames []string } @@ -189,6 +192,12 @@ func (opts *FindReleasesOptions) 
toConds(repoID int64) builder.Cond { if len(opts.TagNames) > 0 { cond = cond.And(builder.In("tag_name", opts.TagNames)) } + if !opts.IsPreRelease.IsNone() { + cond = cond.And(builder.Eq{"is_prerelease": opts.IsPreRelease.IsTrue()}) + } + if !opts.IsDraft.IsNone() { + cond = cond.And(builder.Eq{"is_draft": opts.IsDraft.IsTrue()}) + } return cond } @@ -206,6 +215,11 @@ func GetReleasesByRepoID(repoID int64, opts FindReleasesOptions) ([]*Release, er return rels, sess.Find(&rels) } +// CountReleasesByRepoID returns a number of releases matching FindReleaseOptions and RepoID. +func CountReleasesByRepoID(repoID int64, opts FindReleasesOptions) (int64, error) { + return x.Where(opts.toConds(repoID)).Count(new(Release)) +} + // GetLatestReleaseByRepoID returns the latest release for a repository func GetLatestReleaseByRepoID(repoID int64) (*Release, error) { cond := builder.NewCond(). diff --git a/models/repo.go b/models/repo.go index daa94c0d508b..d6abc1b5e388 100644 --- a/models/repo.go +++ b/models/repo.go @@ -216,12 +216,13 @@ type Repository struct { NumClosedProjects int `xorm:"NOT NULL DEFAULT 0"` NumOpenProjects int `xorm:"-"` - IsPrivate bool `xorm:"INDEX"` - IsEmpty bool `xorm:"INDEX"` - IsArchived bool `xorm:"INDEX"` - IsMirror bool `xorm:"INDEX"` - *Mirror `xorm:"-"` - Status RepositoryStatus `xorm:"NOT NULL DEFAULT 0"` + IsPrivate bool `xorm:"INDEX"` + IsEmpty bool `xorm:"INDEX"` + IsArchived bool `xorm:"INDEX"` + IsMirror bool `xorm:"INDEX"` + *Mirror `xorm:"-"` + PushMirrors []*PushMirror `xorm:"-"` + Status RepositoryStatus `xorm:"NOT NULL DEFAULT 0"` RenderingMetas map[string]string `xorm:"-"` DocumentRenderingMetas map[string]string `xorm:"-"` @@ -255,7 +256,12 @@ func (repo *Repository) SanitizedOriginalURL() string { if repo.OriginalURL == "" { return "" } - return util.SanitizeURLCredentials(repo.OriginalURL, false) + u, err := url.Parse(repo.OriginalURL) + if err != nil { + return "" + } + u.User = nil + return u.String() } // ColorFormat returns a colored string to represent this repo @@ -579,8 +585,7 @@ func (repo *Repository) getReviewers(e Engine, doerID, posterID int64) ([]*User, var users []*User - if repo.IsPrivate || - (repo.Owner.IsOrganization() && repo.Owner.Visibility == api.VisibleTypePrivate) { + if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate { // This a private repository: // Anyone who can read the repository is a requestable reviewer if err := e. @@ -657,6 +662,12 @@ func (repo *Repository) GetMirror() (err error) { return err } +// LoadPushMirrors populates the repository push mirrors. 
+func (repo *Repository) LoadPushMirrors() (err error) { + repo.PushMirrors, err = GetPushMirrorsByRepoID(repo.ID) + return err +} + // GetBaseRepo populates repo.BaseRepo for a fork repository and // returns an error on failure (NOTE: no error is returned for // non-fork repositories, and BaseRepo will be left untouched) @@ -1024,7 +1035,7 @@ func GetRepoInitFile(tp, name string) ([]byte, error) { var ( reservedRepoNames = []string{".", ".."} - reservedRepoPatterns = []string{"*.git", "*.wiki"} + reservedRepoPatterns = []string{"*.git", "*.wiki", "*.rss", "*.atom"} ) // IsUsableRepoName returns true when repository is usable @@ -1216,7 +1227,7 @@ func ChangeRepositoryName(doer *User, repo *Repository, newRepoName string) (err } newRepoPath := RepoPath(repo.Owner.Name, newRepoName) - if err = os.Rename(repo.RepoPath(), newRepoPath); err != nil { + if err = util.Rename(repo.RepoPath(), newRepoPath); err != nil { return fmt.Errorf("rename repository directory: %v", err) } @@ -1227,7 +1238,7 @@ func ChangeRepositoryName(doer *User, repo *Repository, newRepoName string) (err return err } if isExist { - if err = os.Rename(wikiPath, WikiPath(repo.Owner.Name, newRepoName)); err != nil { + if err = util.Rename(wikiPath, WikiPath(repo.Owner.Name, newRepoName)); err != nil { return fmt.Errorf("rename repository wiki: %v", err) } } @@ -1350,6 +1361,26 @@ func UpdateRepository(repo *Repository, visibilityChanged bool) (err error) { return sess.Commit() } +// UpdateRepositoryOwnerNames updates repository owner_names (this should only be used when the ownerName has changed case) +func UpdateRepositoryOwnerNames(ownerID int64, ownerName string) error { + if ownerID == 0 { + return nil + } + sess := x.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return err + } + + if _, err := sess.Where("owner_id = ?", ownerID).Cols("owner_name").Update(&Repository{ + OwnerName: ownerName, + }); err != nil { + return err + } + + return sess.Commit() +} + // UpdateRepositoryUpdatedTime updates a repository's updated time func UpdateRepositoryUpdatedTime(repoID int64, updateTime time.Time) error { _, err := x.Exec("UPDATE repository SET updated_unix = ? 
WHERE id = ?", updateTime.Unix(), repoID) @@ -1466,7 +1497,9 @@ func DeleteRepository(doer *User, uid, repoID int64) error { &Mirror{RepoID: repoID}, &Notification{RepoID: repoID}, &ProtectedBranch{RepoID: repoID}, + &ProtectedTag{RepoID: repoID}, &PullRequest{BaseRepoID: repoID}, + &PushMirror{RepoID: repoID}, &Release{RepoID: repoID}, &RepoIndexerStatus{RepoID: repoID}, &RepoRedirect{RedirectRepoID: repoID}, @@ -1490,6 +1523,11 @@ func DeleteRepository(doer *User, uid, repoID int64) error { return err } + // Delete issue index + if err := deleteResouceIndex(sess, "issue_index", repoID); err != nil { + return err + } + if repo.IsFork { if _, err := sess.Exec("UPDATE `repository` SET num_forks=num_forks-1 WHERE id=?", repo.ForkID); err != nil { return fmt.Errorf("decrease fork count: %v", err) @@ -1549,6 +1587,22 @@ func DeleteRepository(doer *User, uid, repoID int64) error { return err } + // Remove archives + var archives []*RepoArchiver + if err = sess.Where("repo_id=?", repoID).Find(&archives); err != nil { + return err + } + + for _, v := range archives { + v.Repo = repo + p, _ := v.RelativePath() + removeStorageWithNotice(sess, storage.RepoArchives, "Delete repo archive file", p) + } + + if _, err := sess.Delete(&RepoArchiver{RepoID: repoID}); err != nil { + return err + } + if repo.NumForks > 0 { if _, err = sess.Exec("UPDATE `repository` SET fork_id=0,is_fork=? WHERE fork_id=?", false, repo.ID); err != nil { log.Error("reset 'fork_id' and 'is_fork': %v", err) @@ -1562,7 +1616,7 @@ func DeleteRepository(doer *User, uid, repoID int64) error { sess.Close() // We should always delete the files after the database transaction succeed. If - // we delete the file but the database rollback, the repository will be borken. + // we delete the file but the database rollback, the repository will be broken. // Remove issue attachment files. for i := range attachmentPaths { @@ -1710,84 +1764,49 @@ func GetPrivateRepositoryCount(u *User) (int64, error) { return getPrivateRepositoryCount(x, u) } -// DeleteRepositoryArchives deletes all repositories' archives. -func DeleteRepositoryArchives(ctx context.Context) error { - return x. - Where("id > 0"). - Iterate(new(Repository), - func(idx int, bean interface{}) error { - repo := bean.(*Repository) - select { - case <-ctx.Done(): - return ErrCancelledf("before deleting repository archives for %s", repo.FullName()) - default: - } - return util.RemoveAll(filepath.Join(repo.RepoPath(), "archives")) - }) -} - // DeleteOldRepositoryArchives deletes old repository archives. 
func DeleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration) error { log.Trace("Doing: ArchiveCleanup") - if err := x.Where("id > 0").Iterate(new(Repository), func(idx int, bean interface{}) error { - return deleteOldRepositoryArchives(ctx, olderThan, idx, bean) - }); err != nil { - log.Trace("Error: ArchiveClean: %v", err) - return err - } - - log.Trace("Finished: ArchiveCleanup") - return nil -} - -func deleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration, idx int, bean interface{}) error { - repo := bean.(*Repository) - basePath := filepath.Join(repo.RepoPath(), "archives") - - for _, ty := range []string{"zip", "targz"} { - select { - case <-ctx.Done(): - return ErrCancelledf("before deleting old repository archives with filetype %s for %s", ty, repo.FullName()) - default: - } - - path := filepath.Join(basePath, ty) - file, err := os.Open(path) - if err != nil { - if !os.IsNotExist(err) { - log.Warn("Unable to open directory %s: %v", path, err) - return err - } - - // If the directory doesn't exist, that's okay. - continue - } - - files, err := file.Readdir(0) - file.Close() + for { + var archivers []RepoArchiver + err := x.Where("created_unix < ?", time.Now().Add(-olderThan).Unix()). + Asc("created_unix"). + Limit(100). + Find(&archivers) if err != nil { - log.Warn("Unable to read directory %s: %v", path, err) + log.Trace("Error: ArchiveClean: %v", err) return err } - minimumOldestTime := time.Now().Add(-olderThan) - for _, info := range files { - if info.ModTime().Before(minimumOldestTime) && !info.IsDir() { - select { - case <-ctx.Done(): - return ErrCancelledf("before deleting old repository archive file %s with filetype %s for %s", info.Name(), ty, repo.FullName()) - default: - } - toDelete := filepath.Join(path, info.Name()) - // This is a best-effort purge, so we do not check error codes to confirm removal. - if err = util.Remove(toDelete); err != nil { - log.Trace("Unable to delete %s, but proceeding: %v", toDelete, err) - } + for _, archiver := range archivers { + if err := deleteOldRepoArchiver(ctx, &archiver); err != nil { + return err } } + if len(archivers) < 100 { + break + } } + log.Trace("Finished: ArchiveCleanup") + return nil +} + +var delRepoArchiver = new(RepoArchiver) + +func deleteOldRepoArchiver(ctx context.Context, archiver *RepoArchiver) error { + p, err := archiver.RelativePath() + if err != nil { + return err + } + _, err = x.ID(archiver.ID).Delete(delRepoArchiver) + if err != nil { + return err + } + if err := storage.RepoArchives.Delete(p); err != nil { + log.Error("delete repo archive file failed: %v", err) + } return nil } diff --git a/models/repo_archiver.go b/models/repo_archiver.go new file mode 100644 index 000000000000..677f0d3e805d --- /dev/null +++ b/models/repo_archiver.go @@ -0,0 +1,87 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package models + +import ( + "fmt" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/timeutil" +) + +// RepoArchiverStatus represents repo archive status +type RepoArchiverStatus int + +// enumerate all repo archive statuses +const ( + RepoArchiverGenerating = iota // the archiver is generating + RepoArchiverReady // it's ready +) + +// RepoArchiver represents all archivers +type RepoArchiver struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"index unique(s)"` + Repo *Repository `xorm:"-"` + Type git.ArchiveType `xorm:"unique(s)"` + Status RepoArchiverStatus + CommitID string `xorm:"VARCHAR(40) unique(s)"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX NOT NULL created"` +} + +// LoadRepo loads repository +func (archiver *RepoArchiver) LoadRepo() (*Repository, error) { + if archiver.Repo != nil { + return archiver.Repo, nil + } + + var repo Repository + has, err := x.ID(archiver.RepoID).Get(&repo) + if err != nil { + return nil, err + } + if !has { + return nil, ErrRepoNotExist{ + ID: archiver.RepoID, + } + } + return &repo, nil +} + +// RelativePath returns relative path +func (archiver *RepoArchiver) RelativePath() (string, error) { + return fmt.Sprintf("%d/%s/%s.%s", archiver.RepoID, archiver.CommitID[:2], archiver.CommitID, archiver.Type.String()), nil +} + +// GetRepoArchiver get an archiver +func GetRepoArchiver(ctx DBContext, repoID int64, tp git.ArchiveType, commitID string) (*RepoArchiver, error) { + var archiver RepoArchiver + has, err := ctx.e.Where("repo_id=?", repoID).And("`type`=?", tp).And("commit_id=?", commitID).Get(&archiver) + if err != nil { + return nil, err + } + if has { + return &archiver, nil + } + return nil, nil +} + +// AddRepoArchiver adds an archiver +func AddRepoArchiver(ctx DBContext, archiver *RepoArchiver) error { + _, err := ctx.e.Insert(archiver) + return err +} + +// UpdateRepoArchiverStatus updates archiver's status +func UpdateRepoArchiverStatus(ctx DBContext, archiver *RepoArchiver) error { + _, err := ctx.e.ID(archiver.ID).Cols("status").Update(archiver) + return err +} + +// DeleteAllRepoArchives deletes all repo archives records +func DeleteAllRepoArchives() error { + _, err := x.Where("1=1").Delete(new(RepoArchiver)) + return err +} diff --git a/models/repo_generate_test.go b/models/repo_generate_test.go index 53ab4fcd3d55..e7a93433a7be 100644 --- a/models/repo_generate_test.go +++ b/models/repo_generate_test.go @@ -25,7 +25,7 @@ text/*.txt func TestGiteaTemplate(t *testing.T) { gt := GiteaTemplate{Content: giteaTemplate} - assert.Equal(t, len(gt.Globs()), 3) + assert.Len(t, gt.Globs(), 3) tt := []struct { Path string diff --git a/models/repo_list.go b/models/repo_list.go index b4a6d9e438fc..b988cefface8 100644 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -148,11 +148,11 @@ type SearchRepoOptions struct { AllLimited bool // Include also all public repositories of limited organisations // None -> include public and private // True -> include just private - // False -> incude just public + // False -> include just public IsPrivate util.OptionalBool // None -> include collaborative AND non-collaborative // True -> include just collaborative - // False -> incude just non-collaborative + // False -> include just non-collaborative Collaborate util.OptionalBool // None -> include forks AND non-forks // True -> include just forks diff --git a/models/repo_mirror.go b/models/repo_mirror.go index 2c37b54aa99b..cd1f74cb2469 100644 --- a/models/repo_mirror.go +++ b/models/repo_mirror.go @@ -14,6 +14,12 @@ import ( 
"xorm.io/xorm" ) +// RemoteMirrorer defines base methods for pull/push mirrors. +type RemoteMirrorer interface { + GetRepository() *Repository + GetRemoteName() string +} + // Mirror represents mirror information of a repository. type Mirror struct { ID int64 `xorm:"pk autoincr"` @@ -52,6 +58,16 @@ func (m *Mirror) AfterLoad(session *xorm.Session) { } } +// GetRepository returns the repository. +func (m *Mirror) GetRepository() *Repository { + return m.Repo +} + +// GetRemoteName returns the name of the remote. +func (m *Mirror) GetRemoteName() string { + return "origin" +} + // ScheduleNextUpdate calculates and sets next update time. func (m *Mirror) ScheduleNextUpdate() { if m.Interval != 0 { diff --git a/models/repo_permission.go b/models/repo_permission.go index 138613b2e92e..f5138fc54ce4 100644 --- a/models/repo_permission.go +++ b/models/repo_permission.go @@ -176,9 +176,9 @@ func getUserRepoPermission(e Engine, repo *Repository, user *User) (perm Permiss return } - // Prevent strangers from checking out public repo of private orginization - // Allow user if they are collaborator of a repo within a private orginization but not a member of the orginization itself - if repo.Owner.IsOrganization() && !hasOrgVisible(e, repo.Owner, user) && !isCollaborator { + // Prevent strangers from checking out public repo of private organization/users + // Allow user if they are collaborator of a repo within a private user or a private organization but not a member of the organization itself + if !hasOrgOrUserVisible(e, repo.Owner, user) && !isCollaborator { perm.AccessMode = AccessModeNone return } @@ -351,7 +351,7 @@ func hasAccessUnit(e Engine, user *User, repo *Repository, unitType UnitType, te return testMode <= mode, err } -// HasAccessUnit returns ture if user has testMode to the unit of the repository +// HasAccessUnit returns true if user has testMode to the unit of the repository func HasAccessUnit(user *User, repo *Repository, unitType UnitType, testMode AccessMode) (bool, error) { return hasAccessUnit(x, user, repo, unitType, testMode) } diff --git a/models/repo_pushmirror.go b/models/repo_pushmirror.go new file mode 100644 index 000000000000..439972ffa6cc --- /dev/null +++ b/models/repo_pushmirror.go @@ -0,0 +1,106 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "errors" + "time" + + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +var ( + // ErrPushMirrorNotExist mirror does not exist error + ErrPushMirrorNotExist = errors.New("PushMirror does not exist") +) + +// PushMirror represents mirror information of a repository. +type PushMirror struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX"` + Repo *Repository `xorm:"-"` + RemoteName string + + Interval time.Duration + CreatedUnix timeutil.TimeStamp `xorm:"created"` + LastUpdateUnix timeutil.TimeStamp `xorm:"INDEX last_update"` + LastError string `xorm:"text"` +} + +// AfterLoad is invoked from XORM after setting the values of all fields of this object. +func (m *PushMirror) AfterLoad(session *xorm.Session) { + if m == nil { + return + } + + var err error + m.Repo, err = getRepositoryByID(session, m.RepoID) + if err != nil { + log.Error("getRepositoryByID[%d]: %v", m.ID, err) + } +} + +// GetRepository returns the path of the repository. 
+func (m *PushMirror) GetRepository() *Repository { + return m.Repo +} + +// GetRemoteName returns the name of the remote. +func (m *PushMirror) GetRemoteName() string { + return m.RemoteName +} + +// InsertPushMirror inserts a push-mirror to database +func InsertPushMirror(m *PushMirror) error { + _, err := x.Insert(m) + return err +} + +// UpdatePushMirror updates the push-mirror +func UpdatePushMirror(m *PushMirror) error { + _, err := x.ID(m.ID).AllCols().Update(m) + return err +} + +// DeletePushMirrorByID deletes a push-mirrors by ID +func DeletePushMirrorByID(ID int64) error { + _, err := x.ID(ID).Delete(&PushMirror{}) + return err +} + +// DeletePushMirrorsByRepoID deletes all push-mirrors by repoID +func DeletePushMirrorsByRepoID(repoID int64) error { + _, err := x.Delete(&PushMirror{RepoID: repoID}) + return err +} + +// GetPushMirrorByID returns push-mirror information. +func GetPushMirrorByID(ID int64) (*PushMirror, error) { + m := &PushMirror{} + has, err := x.ID(ID).Get(m) + if err != nil { + return nil, err + } else if !has { + return nil, ErrPushMirrorNotExist + } + return m, nil +} + +// GetPushMirrorsByRepoID returns push-mirror information of a repository. +func GetPushMirrorsByRepoID(repoID int64) ([]*PushMirror, error) { + mirrors := make([]*PushMirror, 0, 10) + return mirrors, x.Where("repo_id=?", repoID).Find(&mirrors) +} + +// PushMirrorsIterate iterates all push-mirror repositories. +func PushMirrorsIterate(f func(idx int, bean interface{}) error) error { + return x. + Where("last_update + (`interval` / ?) <= ?", time.Second, time.Now().Unix()). + And("`interval` != 0"). + Iterate(new(PushMirror), f) +} diff --git a/models/repo_pushmirror_test.go b/models/repo_pushmirror_test.go new file mode 100644 index 000000000000..66c499b1c359 --- /dev/null +++ b/models/repo_pushmirror_test.go @@ -0,0 +1,49 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package models + +import ( + "testing" + "time" + + "code.gitea.io/gitea/modules/timeutil" + + "github.com/stretchr/testify/assert" +) + +func TestPushMirrorsIterate(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + + now := timeutil.TimeStampNow() + + InsertPushMirror(&PushMirror{ + RemoteName: "test-1", + LastUpdateUnix: now, + Interval: 1, + }) + + long, _ := time.ParseDuration("24h") + InsertPushMirror(&PushMirror{ + RemoteName: "test-2", + LastUpdateUnix: now, + Interval: long, + }) + + InsertPushMirror(&PushMirror{ + RemoteName: "test-3", + LastUpdateUnix: now, + Interval: 0, + }) + + time.Sleep(1 * time.Millisecond) + + PushMirrorsIterate(func(idx int, bean interface{}) error { + m, ok := bean.(*PushMirror) + assert.True(t, ok) + assert.Equal(t, "test-1", m.RemoteName) + assert.Equal(t, m.RemoteName, m.GetRemoteName()) + return nil + }) +} diff --git a/models/repo_test.go b/models/repo_test.go index 10ba2c99f897..28eb9baa17db 100644 --- a/models/repo_test.go +++ b/models/repo_test.go @@ -61,8 +61,8 @@ func TestMetas(t *testing.T) { metas = repo.ComposeMetas() assert.Contains(t, metas, "org") assert.Contains(t, metas, "teams") - assert.Equal(t, metas["org"], "user3") - assert.Equal(t, metas["teams"], ",owners,team1,") + assert.Equal(t, "user3", metas["org"]) + assert.Equal(t, ",owners,team1,", metas["teams"]) } func TestGetRepositoryCount(t *testing.T) { @@ -111,7 +111,7 @@ func TestUpdateRepositoryVisibilityChanged(t *testing.T) { _, err = x.ID(3).Get(&act) assert.NoError(t, err) - assert.Equal(t, true, act.IsPrivate) + assert.True(t, act.IsPrivate) } func TestGetUserFork(t *testing.T) { @@ -199,13 +199,13 @@ func TestRepoGetReviewers(t *testing.T) { reviewers, err := repo1.GetReviewers(2, 2) assert.NoError(t, err) - assert.Equal(t, 4, len(reviewers)) + assert.Len(t, reviewers, 4) // test private repo repo2 := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository) reviewers, err = repo2.GetReviewers(2, 2) assert.NoError(t, err) - assert.Equal(t, 0, len(reviewers)) + assert.Empty(t, reviewers) } func TestRepoGetReviewerTeams(t *testing.T) { @@ -219,5 +219,5 @@ func TestRepoGetReviewerTeams(t *testing.T) { repo3 := AssertExistsAndLoadBean(t, &Repository{ID: 3}).(*Repository) teams, err = repo3.GetReviewerTeams() assert.NoError(t, err) - assert.Equal(t, 2, len(teams)) + assert.Len(t, teams, 2) } diff --git a/models/repo_transfer.go b/models/repo_transfer.go index c5d1a3a3c217..d7ef0a8ca6b3 100644 --- a/models/repo_transfer.go +++ b/models/repo_transfer.go @@ -210,13 +210,13 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) (err e } if repoRenamed { - if err := os.Rename(RepoPath(newOwnerName, repo.Name), RepoPath(oldOwnerName, repo.Name)); err != nil { + if err := util.Rename(RepoPath(newOwnerName, repo.Name), RepoPath(oldOwnerName, repo.Name)); err != nil { log.Critical("Unable to move repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name, RepoPath(newOwnerName, repo.Name), RepoPath(oldOwnerName, repo.Name), err) } } if wikiRenamed { - if err := os.Rename(WikiPath(newOwnerName, repo.Name), WikiPath(oldOwnerName, repo.Name)); err != nil { + if err := util.Rename(WikiPath(newOwnerName, repo.Name), WikiPath(oldOwnerName, repo.Name)); err != nil { log.Critical("Unable to move wiki for repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name, WikiPath(newOwnerName, repo.Name), WikiPath(oldOwnerName, repo.Name), err) } } @@ -358,7 +358,7 @@ func TransferOwnership(doer *User, 
newOwnerName string, repo *Repository) (err e return fmt.Errorf("Failed to create dir %s: %v", dir, err) } - if err := os.Rename(RepoPath(oldOwner.Name, repo.Name), RepoPath(newOwner.Name, repo.Name)); err != nil { + if err := util.Rename(RepoPath(oldOwner.Name, repo.Name), RepoPath(newOwner.Name, repo.Name)); err != nil { return fmt.Errorf("rename repository directory: %v", err) } repoRenamed = true @@ -370,7 +370,7 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) (err e log.Error("Unable to check if %s exists. Error: %v", wikiPath, err) return err } else if isExist { - if err := os.Rename(wikiPath, WikiPath(newOwner.Name, repo.Name)); err != nil { + if err := util.Rename(wikiPath, WikiPath(newOwner.Name, repo.Name)); err != nil { return fmt.Errorf("rename repository wiki: %v", err) } wikiRenamed = true diff --git a/models/repo_unit.go b/models/repo_unit.go index 1d54579a6e72..a12e056a7d5a 100644 --- a/models/repo_unit.go +++ b/models/repo_unit.go @@ -28,8 +28,7 @@ type UnitConfig struct{} // FromDB fills up a UnitConfig from serialized format. func (cfg *UnitConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) } // ToDB exports a UnitConfig to a serialized format. @@ -45,8 +44,7 @@ type ExternalWikiConfig struct { // FromDB fills up a ExternalWikiConfig from serialized format. func (cfg *ExternalWikiConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) } // ToDB exports a ExternalWikiConfig to a serialized format. @@ -64,8 +62,7 @@ type ExternalTrackerConfig struct { // FromDB fills up a ExternalTrackerConfig from serialized format. func (cfg *ExternalTrackerConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) } // ToDB exports a ExternalTrackerConfig to a serialized format. @@ -83,8 +80,7 @@ type IssuesConfig struct { // FromDB fills up a IssuesConfig from serialized format. func (cfg *IssuesConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) } // ToDB exports a IssuesConfig to a serialized format. @@ -95,20 +91,20 @@ func (cfg *IssuesConfig) ToDB() ([]byte, error) { // PullRequestsConfig describes pull requests config type PullRequestsConfig struct { - IgnoreWhitespaceConflicts bool - AllowMerge bool - AllowRebase bool - AllowRebaseMerge bool - AllowSquash bool - AllowManualMerge bool - AutodetectManualMerge bool - DefaultMergeStyle MergeStyle + IgnoreWhitespaceConflicts bool + AllowMerge bool + AllowRebase bool + AllowRebaseMerge bool + AllowSquash bool + AllowManualMerge bool + AutodetectManualMerge bool + DefaultDeleteBranchAfterMerge bool + DefaultMergeStyle MergeStyle } // FromDB fills up a PullRequestsConfig from serialized format. func (cfg *PullRequestsConfig) FromDB(bs []byte) error { - json := jsoniter.ConfigCompatibleWithStandardLibrary - return json.Unmarshal(bs, &cfg) + return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg) } // ToDB exports a PullRequestsConfig to a serialized format. 
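
The FromDB methods in the repo_unit.go hunks above now route through jsonUnmarshalIgnoreErroneousBOM, whose body is not part of this section. A minimal sketch of such a helper, assuming only that a stray UTF-8 byte order mark needs stripping before the usual jsoniter call (the function name and package placement below are illustrative, not the project's actual implementation):

package models // hypothetical placement, mirroring where the helper is referenced

import (
	"bytes"

	jsoniter "github.com/json-iterator/go"
)

// unmarshalIgnoringBOM is an illustrative stand-in for jsonUnmarshalIgnoreErroneousBOM:
// it drops a UTF-8 byte order mark, if one was stored with the serialized config,
// before delegating to jsoniter as the old FromDB implementations did directly.
func unmarshalIgnoringBOM(bs []byte, v interface{}) error {
	json := jsoniter.ConfigCompatibleWithStandardLibrary
	bs = bytes.TrimPrefix(bs, []byte{0xEF, 0xBB, 0xBF})
	return json.Unmarshal(bs, v)
}
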
diff --git a/models/review.go b/models/review.go index 343621c0fa5a..acb54d970fdc 100644 --- a/models/review.go +++ b/models/review.go @@ -347,7 +347,7 @@ func IsContentEmptyErr(err error) bool { } // SubmitReview creates a review out of the existing pending review or creates a new one if no pending review exist -func SubmitReview(doer *User, issue *Issue, reviewType ReviewType, content, commitID string, stale bool) (*Review, *Comment, error) { +func SubmitReview(doer *User, issue *Issue, reviewType ReviewType, content, commitID string, stale bool, attachmentUUIDs []string) (*Review, *Comment, error) { sess := x.NewSession() defer sess.Close() if err := sess.Begin(); err != nil { @@ -419,12 +419,13 @@ func SubmitReview(doer *User, issue *Issue, reviewType ReviewType, content, comm } comm, err := createComment(sess, &CreateCommentOptions{ - Type: CommentTypeReview, - Doer: doer, - Content: review.Content, - Issue: issue, - Repo: issue.Repo, - ReviewID: review.ID, + Type: CommentTypeReview, + Doer: doer, + Content: review.Content, + Issue: issue, + Repo: issue.Repo, + ReviewID: review.ID, + Attachments: attachmentUUIDs, }) if err != nil || comm == nil { return nil, nil, err @@ -465,7 +466,7 @@ func GetReviewersByIssueID(issueID int64) ([]*Review, error) { return nil, err } - // Get latest review of each reviwer, sorted in order they were made + // Get latest review of each reviewer, sorted in order they were made if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND dismissed = ? AND original_author_id = 0 GROUP BY issue_id, reviewer_id) ORDER BY review.updated_unix ASC", issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, false). Find(&reviews); err != nil { @@ -490,7 +491,7 @@ func GetReviewersByIssueID(issueID int64) ([]*Review, error) { func GetReviewersFromOriginalAuthorsByIssueID(issueID int64) ([]*Review, error) { reviews := make([]*Review, 0, 10) - // Get latest review of each reviwer, sorted in order they were made + // Get latest review of each reviewer, sorted in order they were made if err := x.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND original_author_id <> 0 GROUP BY issue_id, original_author_id) ORDER BY review.updated_unix ASC", issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest). Find(&reviews); err != nil { diff --git a/models/ssh_key.go b/models/ssh_key.go index 9f9c33e848f5..12c7bc91162a 100644 --- a/models/ssh_key.go +++ b/models/ssh_key.go @@ -38,7 +38,6 @@ import ( const ( tplCommentPrefix = `# gitea public key` - tplCommand = "%s --config=%s serv key-%d" tplPublicKey = tplCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s` + "\n" authorizedPrincipalsFile = "authorized_principals" @@ -88,7 +87,16 @@ func (key *PublicKey) OmitEmail() string { // AuthorizedString returns formatted public key string for authorized_keys file. 
func (key *PublicKey) AuthorizedString() string { - return fmt.Sprintf(tplPublicKey, util.ShellEscape(fmt.Sprintf(tplCommand, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf), key.ID)), key.Content) + sb := &strings.Builder{} + _ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]interface{}{ + "AppPath": util.ShellEscape(setting.AppPath), + "AppWorkPath": util.ShellEscape(setting.AppWorkPath), + "CustomConf": util.ShellEscape(setting.CustomConf), + "CustomPath": util.ShellEscape(setting.CustomPath), + "Key": key, + }) + + return fmt.Sprintf(tplPublicKey, util.ShellEscape(sb.String()), key.Content) } func extractTypeFromBase64Key(key string) (string, error) { @@ -834,7 +842,7 @@ func rewriteAllPublicKeys(e Engine) error { } t.Close() - return os.Rename(tmpPath, fPath) + return util.Rename(tmpPath, fPath) } // RegeneratePublicKeys regenerates the authorized_keys file @@ -1316,7 +1324,7 @@ func rewriteAllPrincipalKeys(e Engine) error { } t.Close() - return os.Rename(tmpPath, fPath) + return util.Rename(tmpPath, fPath) } // ListPrincipalKeys returns a list of principals belongs to given user. diff --git a/models/task.go b/models/task.go index 8d4bfbf07607..5f9ccc6bfaf5 100644 --- a/models/task.go +++ b/models/task.go @@ -8,8 +8,11 @@ import ( "fmt" migration "code.gitea.io/gitea/modules/migrations/base" + "code.gitea.io/gitea/modules/secret" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" jsoniter "github.com/json-iterator/go" "xorm.io/builder" @@ -29,10 +32,16 @@ type Task struct { StartTime timeutil.TimeStamp EndTime timeutil.TimeStamp PayloadContent string `xorm:"TEXT"` - Errors string `xorm:"TEXT"` // if task failed, saved the error reason + Message string `xorm:"TEXT"` // if task failed, saved the error reason Created timeutil.TimeStamp `xorm:"created"` } +// TranslatableMessage represents JSON struct that can be translated with a Locale +type TranslatableMessage struct { + Format string + Args []interface{} `json:"omitempty"` +} + // LoadRepo loads repository of the task func (task *Task) LoadRepo() error { return task.loadRepo(x) @@ -110,6 +119,24 @@ func (task *Task) MigrateConfig() (*migration.MigrateOptions, error) { if err != nil { return nil, err } + + // decrypt credentials + if opts.CloneAddrEncrypted != "" { + if opts.CloneAddr, err = secret.DecryptSecret(setting.SecretKey, opts.CloneAddrEncrypted); err != nil { + return nil, err + } + } + if opts.AuthPasswordEncrypted != "" { + if opts.AuthPassword, err = secret.DecryptSecret(setting.SecretKey, opts.AuthPasswordEncrypted); err != nil { + return nil, err + } + } + if opts.AuthTokenEncrypted != "" { + if opts.AuthToken, err = secret.DecryptSecret(setting.SecretKey, opts.AuthTokenEncrypted); err != nil { + return nil, err + } + } + return &opts, nil } return nil, fmt.Errorf("Task type is %s, not Migrate Repo", task.Type.Name()) @@ -205,12 +232,31 @@ func createTask(e Engine, task *Task) error { func FinishMigrateTask(task *Task) error { task.Status = structs.TaskStatusFinished task.EndTime = timeutil.TimeStampNow() + + // delete credentials when we're done, they're a liability. 
+ conf, err := task.MigrateConfig() + if err != nil { + return err + } + conf.AuthPassword = "" + conf.AuthToken = "" + conf.CloneAddr = util.NewStringURLSanitizer(conf.CloneAddr, true).Replace(conf.CloneAddr) + conf.AuthPasswordEncrypted = "" + conf.AuthTokenEncrypted = "" + conf.CloneAddrEncrypted = "" + json := jsoniter.ConfigCompatibleWithStandardLibrary + confBytes, err := json.Marshal(conf) + if err != nil { + return err + } + task.PayloadContent = string(confBytes) + sess := x.NewSession() defer sess.Close() if err := sess.Begin(); err != nil { return err } - if _, err := sess.ID(task.ID).Cols("status", "end_time").Update(task); err != nil { + if _, err := sess.ID(task.ID).Cols("status", "end_time", "payload_content").Update(task); err != nil { return err } diff --git a/models/token.go b/models/token.go index 1245098df0bb..357afe44a7c0 100644 --- a/models/token.go +++ b/models/token.go @@ -10,8 +10,8 @@ import ( "time" "code.gitea.io/gitea/modules/base" - "code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" gouuid "github.com/google/uuid" ) @@ -40,7 +40,7 @@ func (t *AccessToken) AfterLoad() { // NewAccessToken creates new access token. func NewAccessToken(t *AccessToken) error { - salt, err := generate.GetRandomString(10) + salt, err := util.RandomString(10) if err != nil { return err } @@ -57,9 +57,15 @@ func GetAccessTokenBySHA(token string) (*AccessToken, error) { if token == "" { return nil, ErrAccessTokenEmpty{} } - if len(token) < 8 { + // A token is defined as being SHA1 sum these are 40 hexadecimal bytes long + if len(token) != 40 { return nil, ErrAccessTokenNotExist{token} } + for _, x := range []byte(token) { + if x < '0' || (x > '9' && x < 'a') || x > 'f' { + return nil, ErrAccessTokenNotExist{token} + } + } var tokens []AccessToken lastEight := token[len(token)-8:] err := x.Table(&AccessToken{}).Where("token_last_eight = ?", lastEight).Find(&tokens) diff --git a/models/topic.go b/models/topic.go index 31e81c8d0ff1..19c572fefebf 100644 --- a/models/topic.go +++ b/models/topic.go @@ -195,7 +195,7 @@ func FindTopics(opts *FindTopicOptions) (topics []*Topic, err error) { return topics, sess.Desc("topic.repo_count").Find(&topics) } -// GetRepoTopicByName retrives topic from name for a repo if it exist +// GetRepoTopicByName retrieves topic from name for a repo if it exist func GetRepoTopicByName(repoID int64, topicName string) (*Topic, error) { return getRepoTopicByName(x, repoID, topicName) } diff --git a/models/topic_test.go b/models/topic_test.go index b6ef8f565c2a..25232eb981c2 100644 --- a/models/topic_test.go +++ b/models/topic_test.go @@ -19,31 +19,31 @@ func TestAddTopic(t *testing.T) { topics, err := FindTopics(&FindTopicOptions{}) assert.NoError(t, err) - assert.EqualValues(t, totalNrOfTopics, len(topics)) + assert.Len(t, topics, totalNrOfTopics) topics, err = FindTopics(&FindTopicOptions{ ListOptions: ListOptions{Page: 1, PageSize: 2}, }) assert.NoError(t, err) - assert.EqualValues(t, 2, len(topics)) + assert.Len(t, topics, 2) topics, err = FindTopics(&FindTopicOptions{ RepoID: 1, }) assert.NoError(t, err) - assert.EqualValues(t, repo1NrOfTopics, len(topics)) + assert.Len(t, topics, repo1NrOfTopics) assert.NoError(t, SaveTopics(2, "golang")) repo2NrOfTopics = 1 topics, err = FindTopics(&FindTopicOptions{}) assert.NoError(t, err) - assert.EqualValues(t, totalNrOfTopics, len(topics)) + assert.Len(t, topics, totalNrOfTopics) topics, err = FindTopics(&FindTopicOptions{ RepoID: 2, }) assert.NoError(t, err) - 
assert.EqualValues(t, repo2NrOfTopics, len(topics)) + assert.Len(t, topics, repo2NrOfTopics) assert.NoError(t, SaveTopics(2, "golang", "gitea")) repo2NrOfTopics = 2 @@ -54,13 +54,13 @@ func TestAddTopic(t *testing.T) { topics, err = FindTopics(&FindTopicOptions{}) assert.NoError(t, err) - assert.EqualValues(t, totalNrOfTopics, len(topics)) + assert.Len(t, topics, totalNrOfTopics) topics, err = FindTopics(&FindTopicOptions{ RepoID: 2, }) assert.NoError(t, err) - assert.EqualValues(t, repo2NrOfTopics, len(topics)) + assert.Len(t, topics, repo2NrOfTopics) } func TestTopicValidator(t *testing.T) { diff --git a/models/twofactor.go b/models/twofactor.go index a84da8cdb52b..c19c5d120fd9 100644 --- a/models/twofactor.go +++ b/models/twofactor.go @@ -11,10 +11,10 @@ import ( "encoding/base64" "fmt" - "code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/secret" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "github.com/pquerna/otp/totp" "golang.org/x/crypto/pbkdf2" @@ -34,11 +34,11 @@ type TwoFactor struct { // GenerateScratchToken recreates the scratch token the user is using. func (t *TwoFactor) GenerateScratchToken() (string, error) { - token, err := generate.GetRandomString(8) + token, err := util.RandomString(8) if err != nil { return "", err } - t.ScratchSalt, _ = generate.GetRandomString(10) + t.ScratchSalt, _ = util.RandomString(10) t.ScratchHash = hashToken(token, t.ScratchSalt) return token, nil } diff --git a/models/unit_tests.go b/models/unit_tests.go index cefdae2cd6ae..f8d681933361 100644 --- a/models/unit_tests.go +++ b/models/unit_tests.go @@ -74,6 +74,8 @@ func MainTest(m *testing.M, pathToGiteaRoot string) { setting.RepoAvatar.Storage.Path = filepath.Join(setting.AppDataPath, "repo-avatars") + setting.RepoArchive.Storage.Path = filepath.Join(setting.AppDataPath, "repo-archive") + if err = storage.Init(); err != nil { fatalTestError("storage.Init: %v\n", err) } @@ -103,7 +105,7 @@ func CreateTestEngine(fixturesDir string) error { return err } x.SetMapper(names.GonicMapper{}) - if err = x.StoreEngine("InnoDB").Sync2(tables...); err != nil { + if err = syncTables(); err != nil { return err } switch os.Getenv("GITEA_UNIT_TESTS_VERBOSE") { diff --git a/models/user.go b/models/user.go index 26cfc0804e53..f606da53d65f 100644 --- a/models/user.go +++ b/models/user.go @@ -22,7 +22,6 @@ import ( "unicode/utf8" "code.gitea.io/gitea/modules/base" - "code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -75,9 +74,6 @@ const ( ) var ( - // ErrEmailNotExist e-mail does not exist error - ErrEmailNotExist = errors.New("E-mail does not exist") - // ErrEmailNotActivated e-mail address has not been activated error ErrEmailNotActivated = errors.New("E-mail address has not been activated") @@ -116,7 +112,6 @@ type User struct { LoginName string Type UserType OwnedOrgs []*User `xorm:"-"` - Orgs []*User `xorm:"-"` Repos []*Repository `xorm:"-"` Location string Website string @@ -437,6 +432,62 @@ func (u *User) IsPasswordSet() bool { return len(u.Passwd) != 0 } +// IsVisibleToUser check if viewer is able to see user profile +func (u *User) IsVisibleToUser(viewer *User) bool { + return u.isVisibleToUser(x, viewer) +} + +func (u *User) isVisibleToUser(e Engine, viewer *User) bool { + if viewer != nil && viewer.IsAdmin { + return true + } + + switch u.Visibility { + case structs.VisibleTypePublic: + return true + case 
structs.VisibleTypeLimited: + if viewer == nil || viewer.IsRestricted { + return false + } + return true + case structs.VisibleTypePrivate: + if viewer == nil || viewer.IsRestricted { + return false + } + + // If they follow - they see each over + follower := IsFollowing(u.ID, viewer.ID) + if follower { + return true + } + + // Now we need to check if they in some organization together + count, err := x.Table("team_user"). + Where( + builder.And( + builder.Eq{"uid": viewer.ID}, + builder.Or( + builder.Eq{"org_id": u.ID}, + builder.In("org_id", + builder.Select("org_id"). + From("team_user", "t2"). + Where(builder.Eq{"uid": u.ID}))))). + Count(new(TeamUser)) + if err != nil { + return false + } + + if count < 0 { + // No common organization + return false + } + + // they are in an organization together + return true + } + return false +} + // IsOrganization returns true if user is actually a organization. func (u *User) IsOrganization() bool { return u.Type == UserTypeOrganization @@ -607,58 +658,6 @@ func (u *User) GetOwnedOrganizations() (err error) { return err } -// GetOrganizations returns paginated organizations that user belongs to. -// TODO: does not respect All and show orgs you privately participate -func (u *User) GetOrganizations(opts *SearchOrganizationsOptions) error { - sess := x.NewSession() - defer sess.Close() - - schema, err := x.TableInfo(new(User)) - if err != nil { - return err - } - groupByCols := &strings.Builder{} - for _, col := range schema.Columns() { - fmt.Fprintf(groupByCols, "`%s`.%s,", schema.Name, col.Name) - } - groupByStr := groupByCols.String() - groupByStr = groupByStr[0 : len(groupByStr)-1] - - sess.Select("`user`.*, count(repo_id) as org_count"). - Table("user"). - Join("INNER", "org_user", "`org_user`.org_id=`user`.id"). - Join("LEFT", builder. - Select("id as repo_id, owner_id as repo_owner_id"). - From("repository"). - Where(accessibleRepositoryCondition(u)), "`repository`.repo_owner_id = `org_user`.org_id"). - And("`org_user`.uid=?", u.ID). - GroupBy(groupByStr) - if opts.PageSize != 0 { - sess = opts.setSessionPagination(sess) - } - type OrgCount struct { - User `xorm:"extends"` - OrgCount int - } - orgCounts := make([]*OrgCount, 0, 10) - - if err := sess. - Asc("`user`.name"). - Find(&orgCounts); err != nil { - return err - } - - orgs := make([]*User, len(orgCounts)) - for i, orgCount := range orgCounts { - orgCount.User.NumRepos = orgCount.OrgCount - orgs[i] = &orgCount.User - } - - u.Orgs = orgs - - return nil -} - // DisplayName returns full name if it's not empty, // returns username otherwise. func (u *User) DisplayName() string { @@ -746,7 +745,7 @@ func IsUserExist(uid int64, name string) (bool, error) { // GetUserSalt returns a random user salt token. func GetUserSalt() (string, error) { - return generate.GetRandomString(10) + return util.RandomString(10) } // NewGhostUser creates and returns a fake user for someone has deleted his/her account. 
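
The visibility switch added to user.go above packs several rules into one method. A minimal illustrative sketch, assuming placement alongside user.go (the helper name is invented and not part of this change), of how IsVisibleToUser resolves the cases that need no database access:

package models // hypothetical placement alongside user.go

import "code.gitea.io/gitea/modules/structs"

// visibilitySketch is illustrative only; it exercises the branches of
// isVisibleToUser that do not touch the database.
func visibilitySketch() {
	publicUser := &User{Visibility: structs.VisibleTypePublic}
	limitedUser := &User{Visibility: structs.VisibleTypeLimited}

	_ = publicUser.IsVisibleToUser(nil)                        // true: public profiles are visible to everyone
	_ = limitedUser.IsVisibleToUser(nil)                       // false: limited profiles require a signed-in viewer
	_ = limitedUser.IsVisibleToUser(&User{IsRestricted: true}) // false: restricted viewers are treated like anonymous ones
	_ = limitedUser.IsVisibleToUser(&User{IsAdmin: true})      // true: admins are accepted before the switch is reached

	// For VisibleTypePrivate the method goes on to check follow relations and
	// shared organizations, which are resolved against the database.
}
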
@@ -790,6 +789,7 @@ var ( "debug", "error", "explore", + "favicon.ico", "ghost", "help", "install", @@ -808,13 +808,13 @@ var ( "repo", "robots.txt", "search", + "serviceworker.js", "stars", "template", "user", - "favicon.ico", } - reservedUserPatterns = []string{"*.keys", "*.gpg"} + reservedUserPatterns = []string{"*.keys", "*.gpg", "*.rss", "*.atom"} ) // isUsableName checks if name is reserved or pattern of name is not allowed @@ -852,41 +852,47 @@ func IsUsableUsername(name string) error { return isUsableName(reservedUsernames, reservedUserPatterns, name) } +// CreateUserOverwriteOptions are an optional options who overwrite system defaults on user creation +type CreateUserOverwriteOptions struct { + Visibility structs.VisibleType +} + // CreateUser creates record of a new user. -func CreateUser(u *User) (err error) { +func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err error) { if err = IsUsableUsername(u.Name); err != nil { return err } + // set system defaults + u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate + u.Visibility = setting.Service.DefaultUserVisibilityMode + u.AllowCreateOrganization = setting.Service.DefaultAllowCreateOrganization && !setting.Admin.DisableRegularOrgCreation + u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification + u.MaxRepoCreation = -1 + u.Theme = setting.UI.DefaultTheme + + // overwrite defaults if set + if len(overwriteDefault) != 0 && overwriteDefault[0] != nil { + u.Visibility = overwriteDefault[0].Visibility + } + sess := x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { return err } - isExist, err := isUserExist(sess, 0, u.Name) - if err != nil { - return err - } else if isExist { - return ErrUserAlreadyExist{u.Name} - } + // validate data - if err = deleteUserRedirect(sess, u.Name); err != nil { + if err := validateUser(u); err != nil { return err } - u.Email = strings.ToLower(u.Email) - isExist, err = sess. - Where("email=?", u.Email). 
- Get(new(User)) + isExist, err := isUserExist(sess, 0, u.Name) if err != nil { return err } else if isExist { - return ErrEmailAlreadyUsed{u.Email} - } - - if err = ValidateEmail(u.Email); err != nil { - return err + return ErrUserAlreadyExist{u.Name} } isExist, err = isEmailUsed(sess, u.Email) @@ -896,7 +902,7 @@ func CreateUser(u *User) (err error) { return ErrEmailAlreadyUsed{u.Email} } - u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate + // prepare for database u.LowerName = strings.ToLower(u.Name) u.AvatarEmail = u.Email @@ -906,15 +912,28 @@ func CreateUser(u *User) (err error) { if err = u.SetPassword(u.Passwd); err != nil { return err } - u.AllowCreateOrganization = setting.Service.DefaultAllowCreateOrganization && !setting.Admin.DisableRegularOrgCreation - u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification - u.MaxRepoCreation = -1 - u.Theme = setting.UI.DefaultTheme + + // save changes to database + + if err = deleteUserRedirect(sess, u.Name); err != nil { + return err + } if _, err = sess.Insert(u); err != nil { return err } + // insert email address + if _, err := sess.Insert(&EmailAddress{ + UID: u.ID, + Email: u.Email, + LowerEmail: strings.ToLower(u.Email), + IsActivated: u.IsActive, + IsPrimary: true, + }); err != nil { + return err + } + return sess.Commit() } @@ -1008,7 +1027,7 @@ func ChangeUserName(u *User, newUserName string) (err error) { } // Do not fail if directory does not exist - if err = os.Rename(UserPath(oldUserName), UserPath(newUserName)); err != nil && !os.IsNotExist(err) { + if err = util.Rename(UserPath(oldUserName), UserPath(newUserName)); err != nil && !os.IsNotExist(err) { return fmt.Errorf("Rename user directory: %v", err) } @@ -1017,7 +1036,7 @@ func ChangeUserName(u *User, newUserName string) (err error) { } if err = sess.Commit(); err != nil { - if err2 := os.Rename(UserPath(newUserName), UserPath(oldUserName)); err2 != nil && !os.IsNotExist(err2) { + if err2 := util.Rename(UserPath(newUserName), UserPath(oldUserName)); err2 != nil && !os.IsNotExist(err2) { log.Critical("Unable to rollback directory change during failed username change from: %s to: %s. DB Error: %v. Filesystem Error: %v", oldUserName, newUserName, err, err2) return fmt.Errorf("failed to rollback directory change during failed username change from: %s to: %s. DB Error: %w. 
Filesystem Error: %v", oldUserName, newUserName, err, err2) } @@ -1043,12 +1062,22 @@ func checkDupEmail(e Engine, u *User) error { return nil } -func updateUser(e Engine, u *User) (err error) { +// validateUser check if user is valide to insert / update into database +func validateUser(u *User) error { + if !setting.Service.AllowedUserVisibilityModesSlice.IsAllowedVisibility(u.Visibility) { + return fmt.Errorf("visibility Mode not allowed: %s", u.Visibility.String()) + } + u.Email = strings.ToLower(u.Email) - if err = ValidateEmail(u.Email); err != nil { + return ValidateEmail(u.Email) +} + +func updateUser(e Engine, u *User) error { + if err := validateUser(u); err != nil { return err } - _, err = e.ID(u.ID).AllCols().Update(u) + + _, err := e.ID(u.ID).AllCols().Update(u) return err } @@ -1063,6 +1092,10 @@ func UpdateUserCols(u *User, cols ...string) error { } func updateUserCols(e Engine, u *User, cols ...string) error { + if err := validateUser(u); err != nil { + return err + } + _, err := e.ID(u.ID).Cols(cols...).Update(u) return err } @@ -1581,10 +1614,9 @@ func (opts *SearchUserOptions) toConds() builder.Cond { cond = cond.And(keywordCond) } + // If visibility filtered if len(opts.Visible) > 0 { cond = cond.And(builder.In("visibility", opts.Visible)) - } else { - cond = cond.And(builder.In("visibility", structs.VisibleTypePublic)) } if opts.Actor != nil { @@ -1597,16 +1629,27 @@ func (opts *SearchUserOptions) toConds() builder.Cond { exprCond = builder.Expr("org_user.org_id = \"user\".id") } - var accessCond builder.Cond - if !opts.Actor.IsRestricted { - accessCond = builder.Or( - builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))), - builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited)) - } else { - // restricted users only see orgs they are a member of - accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}))) + // If Admin - they see all users! 
+ if !opts.Actor.IsAdmin { + // Force visibility for privacy + var accessCond builder.Cond + if !opts.Actor.IsRestricted { + accessCond = builder.Or( + builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))), + builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited)) + } else { + // restricted users only see orgs they are a member of + accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}))) + } + // Don't forget about self + accessCond = accessCond.Or(builder.Eq{"id": opts.Actor.ID}) + cond = cond.And(accessCond) } - cond = cond.And(accessCond) + + } else { + // Force visibility for privacy + // Not logged in - only public users + cond = cond.And(builder.In("visibility", structs.VisibleTypePublic)) } if opts.UID > 0 { diff --git a/models/user_heatmap.go b/models/user_heatmap.go index 0e2767212e9d..306bd1819b70 100644 --- a/models/user_heatmap.go +++ b/models/user_heatmap.go @@ -32,17 +32,14 @@ func getUserHeatmapData(user *User, team *Team, doer *User) ([]*UserHeatmapData, return hdata, nil } - var groupBy string + // Group by 15 minute intervals which will allow the client to accurately shift the timestamp to their timezone. + // The interval is based on the fact that there are timezones such as UTC +5:30 and UTC +12:45. + groupBy := "created_unix / 900 * 900" groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias switch { - case setting.Database.UseSQLite3: - groupBy = "strftime('%s', strftime('%Y-%m-%d', created_unix, 'unixepoch'))" case setting.Database.UseMySQL: - groupBy = "UNIX_TIMESTAMP(DATE(FROM_UNIXTIME(created_unix)))" - case setting.Database.UsePostgreSQL: - groupBy = "extract(epoch from date_trunc('day', to_timestamp(created_unix)))" + groupBy = "created_unix DIV 900 * 900" case setting.Database.UseMSSQL: - groupBy = "datediff(SECOND, '19700101', dateadd(DAY, 0, datediff(day, 0, dateadd(s, created_unix, '19700101'))))" groupByName = groupBy } diff --git a/models/user_heatmap_test.go b/models/user_heatmap_test.go index 03d1755c9d05..a3309bb75b68 100644 --- a/models/user_heatmap_test.go +++ b/models/user_heatmap_test.go @@ -19,12 +19,20 @@ func TestGetUserHeatmapDataByUser(t *testing.T) { CountResult int JSONResult string }{ - {2, 2, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // self looks at action in private repo - {2, 1, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // admin looks at action in private repo - {2, 3, 0, `[]`}, // other user looks at action in private repo - {2, 0, 0, `[]`}, // nobody looks at action in private repo - {16, 15, 1, `[{"timestamp":1603238400,"contributions":1}]`}, // collaborator looks at action in private repo - {3, 3, 0, `[]`}, // no action action not performed by target user + // self looks at action in private repo + {2, 2, 1, `[{"timestamp":1603227600,"contributions":1}]`}, + // admin looks at action in private repo + {2, 1, 1, `[{"timestamp":1603227600,"contributions":1}]`}, + // other user looks at action in private repo + {2, 3, 0, `[]`}, + // nobody looks at action in private repo + {2, 0, 0, `[]`}, + // collaborator looks at action in private repo + {16, 15, 1, `[{"timestamp":1603267200,"contributions":1}]`}, + // no action action not performed by target user + {3, 3, 0, `[]`}, + // multiple actions performed with two 
grouped together + {10, 10, 3, `[{"timestamp":1603009800,"contributions":1},{"timestamp":1603010700,"contributions":2}]`}, } // Prepare assert.NoError(t, PrepareTestDatabase()) @@ -56,9 +64,13 @@ func TestGetUserHeatmapDataByUser(t *testing.T) { // Get the heatmap and compare heatmap, err := GetUserHeatmapDataByUser(user, doer) + var contributions int + for _, hm := range heatmap { + contributions += int(hm.Contributions) + } assert.NoError(t, err) - assert.Equal(t, len(actions), len(heatmap), "invalid action count: did the test data became too old?") - assert.Equal(t, tc.CountResult, len(heatmap), fmt.Sprintf("testcase %d", i)) + assert.Len(t, actions, contributions, "invalid action count: did the test data became too old?") + assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase %d", i)) // Test JSON rendering json := jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/models/user_mail.go b/models/user_mail.go index 1bdd6a423cc3..f8b084a0064b 100644 --- a/models/user_mail.go +++ b/models/user_mail.go @@ -17,14 +17,22 @@ import ( "xorm.io/builder" ) -// EmailAddress is the list of all email addresses of a user. Can contain the -// primary email address, but is not obligatory. +// EmailAddress is the list of all email addresses of a user. It also contains the +// primary email address which is saved in user table. type EmailAddress struct { ID int64 `xorm:"pk autoincr"` UID int64 `xorm:"INDEX NOT NULL"` Email string `xorm:"UNIQUE NOT NULL"` + LowerEmail string `xorm:"UNIQUE NOT NULL"` IsActivated bool - IsPrimary bool `xorm:"-"` + IsPrimary bool `xorm:"DEFAULT(false) NOT NULL"` +} + +// BeforeInsert will be invoked by XORM before inserting a record +func (email *EmailAddress) BeforeInsert() { + if email.LowerEmail == "" { + email.LowerEmail = strings.ToLower(email.Email) + } } // ValidateEmail check if email is a allowed address @@ -47,34 +55,10 @@ func GetEmailAddresses(uid int64) ([]*EmailAddress, error) { emails := make([]*EmailAddress, 0, 5) if err := x. Where("uid=?", uid). + Asc("id"). Find(&emails); err != nil { return nil, err } - - u, err := GetUserByID(uid) - if err != nil { - return nil, err - } - - isPrimaryFound := false - for _, email := range emails { - if email.Email == u.Email { - isPrimaryFound = true - email.IsPrimary = true - } else { - email.IsPrimary = false - } - } - - // We always want the primary email address displayed, even if it's not in - // the email address table (yet). 
- if !isPrimaryFound { - emails = append(emails, &EmailAddress{ - Email: u.Email, - IsActivated: u.IsActive, - IsPrimary: true, - }) - } return emails, nil } @@ -90,40 +74,24 @@ func GetEmailAddressByID(uid, id int64) (*EmailAddress, error) { return email, nil } -func isEmailActive(e Engine, email string, userID, emailID int64) (bool, error) { +// isEmailActive check if email is activated with a different emailID +func isEmailActive(e Engine, email string, excludeEmailID int64) (bool, error) { if len(email) == 0 { return true, nil } // Can't filter by boolean field unless it's explicit cond := builder.NewCond() - cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": emailID}) + cond = cond.And(builder.Eq{"lower_email": strings.ToLower(email)}, builder.Neq{"id": excludeEmailID}) if setting.Service.RegisterEmailConfirm { // Inactive (unvalidated) addresses don't count as active if email validation is required cond = cond.And(builder.Eq{"is_activated": true}) } - em := EmailAddress{} - + var em EmailAddress if has, err := e.Where(cond).Get(&em); has || err != nil { if has { - log.Info("isEmailActive('%s',%d,%d) found duplicate in email ID %d", email, userID, emailID, em.ID) - } - return has, err - } - - // Can't filter by boolean field unless it's explicit - cond = builder.NewCond() - cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": userID}) - if setting.Service.RegisterEmailConfirm { - cond = cond.And(builder.Eq{"is_active": true}) - } - - us := User{} - - if has, err := e.Where(cond).Get(&us); has || err != nil { - if has { - log.Info("isEmailActive('%s',%d,%d) found duplicate in user ID %d", email, userID, emailID, us.ID) + log.Info("isEmailActive(%q, %d) found duplicate in email ID %d", email, excludeEmailID, em.ID) } return has, err } @@ -136,7 +104,7 @@ func isEmailUsed(e Engine, email string) (bool, error) { return true, nil } - return e.Where("email=?", email).Get(&EmailAddress{}) + return e.Where("lower_email=?", strings.ToLower(email)).Get(&EmailAddress{}) } // IsEmailUsed returns true if the email has been used. @@ -145,7 +113,7 @@ func IsEmailUsed(email string) (bool, error) { } func addEmailAddress(e Engine, email *EmailAddress) error { - email.Email = strings.ToLower(strings.TrimSpace(email.Email)) + email.Email = strings.TrimSpace(email.Email) used, err := isEmailUsed(e, email.Email) if err != nil { return err @@ -174,7 +142,7 @@ func AddEmailAddresses(emails []*EmailAddress) error { // Check if any of them has been used for i := range emails { - emails[i].Email = strings.ToLower(strings.TrimSpace(emails[i].Email)) + emails[i].Email = strings.TrimSpace(emails[i].Email) used, err := IsEmailUsed(emails[i].Email) if err != nil { return err @@ -223,6 +191,10 @@ func (email *EmailAddress) updateActivation(e Engine, activate bool) error { // DeleteEmailAddress deletes an email address of given user. func DeleteEmailAddress(email *EmailAddress) (err error) { + if email.IsPrimary { + return ErrPrimaryEmailCannotDelete{Email: email.Email} + } + var deleted int64 // ask to check UID address := EmailAddress{ @@ -231,8 +203,11 @@ func DeleteEmailAddress(email *EmailAddress) (err error) { if email.ID > 0 { deleted, err = x.ID(email.ID).Delete(&address) } else { + if email.Email != "" && email.LowerEmail == "" { + email.LowerEmail = strings.ToLower(email.Email) + } deleted, err = x. - Where("email=?", email.Email). + Where("lower_email=?", email.LowerEmail). 
Delete(&address) } @@ -261,7 +236,7 @@ func MakeEmailPrimary(email *EmailAddress) error { if err != nil { return err } else if !has { - return ErrEmailNotExist + return ErrEmailAddressNotExist{Email: email.Email} } if !email.IsActivated { @@ -276,32 +251,31 @@ func MakeEmailPrimary(email *EmailAddress) error { return ErrUserNotExist{email.UID, "", 0} } - // Make sure the former primary email doesn't disappear. - formerPrimaryEmail := &EmailAddress{UID: user.ID, Email: user.Email} - has, err = x.Get(formerPrimaryEmail) - if err != nil { - return err - } - sess := x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { return err } - if !has { - formerPrimaryEmail.UID = user.ID - formerPrimaryEmail.IsActivated = user.IsActive - if _, err = sess.Insert(formerPrimaryEmail); err != nil { - return err - } - } - + // 1. Update user table user.Email = email.Email if _, err = sess.ID(user.ID).Cols("email").Update(user); err != nil { return err } + // 2. Update old primary email + if _, err = sess.Where("uid=? AND is_primary=?", email.UID, true).Cols("is_primary").Update(&EmailAddress{ + IsPrimary: false, + }); err != nil { + return err + } + + // 3. update new primary email + email.IsPrimary = true + if _, err = sess.ID(email.ID).Cols("is_primary").Update(email); err != nil { + return err + } + return sess.Commit() } @@ -314,10 +288,10 @@ func (s SearchEmailOrderBy) String() string { // Strings for sorting result const ( - SearchEmailOrderByEmail SearchEmailOrderBy = "emails.email ASC, is_primary DESC, sortid ASC" - SearchEmailOrderByEmailReverse SearchEmailOrderBy = "emails.email DESC, is_primary ASC, sortid DESC" - SearchEmailOrderByName SearchEmailOrderBy = "`user`.lower_name ASC, is_primary DESC, sortid ASC" - SearchEmailOrderByNameReverse SearchEmailOrderBy = "`user`.lower_name DESC, is_primary ASC, sortid DESC" + SearchEmailOrderByEmail SearchEmailOrderBy = "email_address.lower_email ASC, email_address.is_primary DESC, email_address.id ASC" + SearchEmailOrderByEmailReverse SearchEmailOrderBy = "email_address.lower_email DESC, email_address.is_primary ASC, email_address.id DESC" + SearchEmailOrderByName SearchEmailOrderBy = "`user`.lower_name ASC, email_address.is_primary DESC, email_address.id ASC" + SearchEmailOrderByNameReverse SearchEmailOrderBy = "`user`.lower_name DESC, email_address.is_primary ASC, email_address.id DESC" ) // SearchEmailOptions are options to search e-mail addresses for the admin panel @@ -343,54 +317,32 @@ type SearchEmailResult struct { // SearchEmails takes options i.e. keyword and part of email name to search, // it returns results in given range and number of total results. func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) { - // Unfortunately, UNION support for SQLite in xorm is currently broken, so we must - // build the SQL ourselves. - where := make([]string, 0, 5) - args := make([]interface{}, 0, 5) - - emailsSQL := "(SELECT id as sortid, uid, email, is_activated, 0 as is_primary " + - "FROM email_address " + - "UNION ALL " + - "SELECT id as sortid, id AS uid, email, is_active AS is_activated, 1 as is_primary " + - "FROM `user` " + - "WHERE type = ?) AS emails" - args = append(args, UserTypeIndividual) - + var cond builder.Cond = builder.Eq{"`user`.`type`": UserTypeIndividual} if len(opts.Keyword) > 0 { - // Note: % can be injected in the Keyword parameter, but it won't do any harm. - where = append(where, "(lower(`user`.full_name) LIKE ? OR `user`.lower_name LIKE ? 
OR emails.email LIKE ?)") likeStr := "%" + strings.ToLower(opts.Keyword) + "%" - args = append(args, likeStr) - args = append(args, likeStr) - args = append(args, likeStr) + cond = cond.And(builder.Or( + builder.Like{"lower(`user`.full_name)", likeStr}, + builder.Like{"`user`.lower_name", likeStr}, + builder.Like{"email_address.lower_email", likeStr}, + )) } switch { case opts.IsPrimary.IsTrue(): - where = append(where, "emails.is_primary = ?") - args = append(args, true) + cond = cond.And(builder.Eq{"email_address.is_primary": true}) case opts.IsPrimary.IsFalse(): - where = append(where, "emails.is_primary = ?") - args = append(args, false) + cond = cond.And(builder.Eq{"email_address.is_primary": false}) } switch { case opts.IsActivated.IsTrue(): - where = append(where, "emails.is_activated = ?") - args = append(args, true) + cond = cond.And(builder.Eq{"email_address.is_activated": true}) case opts.IsActivated.IsFalse(): - where = append(where, "emails.is_activated = ?") - args = append(args, false) - } - - var whereStr string - if len(where) > 0 { - whereStr = "WHERE " + strings.Join(where, " AND ") + cond = cond.And(builder.Eq{"email_address.is_activated": false}) } - joinSQL := "FROM " + emailsSQL + " INNER JOIN `user` ON `user`.id = emails.uid " + whereStr - - count, err := x.SQL("SELECT count(*) "+joinSQL, args...).Count() + count, err := x.Join("INNER", "`user`", "`user`.ID = email_address.uid"). + Where(cond).Count(new(EmailAddress)) if err != nil { return nil, 0, fmt.Errorf("Count: %v", err) } @@ -400,97 +352,71 @@ func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) orderby = SearchEmailOrderByEmail.String() } - querySQL := "SELECT emails.uid, emails.email, emails.is_activated, emails.is_primary, " + - "`user`.name, `user`.full_name " + joinSQL + " ORDER BY " + orderby - opts.setDefaultValues() - rows, err := x.SQL(querySQL, args...).Rows(new(SearchEmailResult)) - if err != nil { - return nil, 0, fmt.Errorf("Emails: %v", err) - } - - // Page manually because xorm can't handle Limit() with raw SQL - defer rows.Close() - emails := make([]*SearchEmailResult, 0, opts.PageSize) - skip := (opts.Page - 1) * opts.PageSize - - for rows.Next() { - var email SearchEmailResult - if err := rows.Scan(&email); err != nil { - return nil, 0, err - } - if skip > 0 { - skip-- - continue - } - emails = append(emails, &email) - if len(emails) == opts.PageSize { - break - } - } + err = x.Table("email_address"). + Select("email_address.*, `user`.name, `user`.full_name"). + Join("INNER", "`user`", "`user`.ID = email_address.uid"). + Where(cond). + OrderBy(orderby). + Limit(opts.PageSize, (opts.Page-1)*opts.PageSize). 
+ Find(&emails) return emails, count, err } // ActivateUserEmail will change the activated state of an email address, -// either primary (in the user table) or secondary (in the email_address table) -func ActivateUserEmail(userID int64, email string, primary, activate bool) (err error) { +// either primary or secondary (all in the email_address table) +func ActivateUserEmail(userID int64, email string, activate bool) (err error) { sess := x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { return err } - if primary { - // Activate/deactivate a user's primary email address + + // Activate/deactivate a user's secondary email address + // First check if there's another user active with the same address + addr := EmailAddress{UID: userID, LowerEmail: strings.ToLower(email)} + if has, err := sess.Get(&addr); err != nil { + return err + } else if !has { + return fmt.Errorf("no such email: %d (%s)", userID, email) + } + if addr.IsActivated == activate { + // Already in the desired state; no action + return nil + } + if activate { + if used, err := isEmailActive(sess, email, addr.ID); err != nil { + return fmt.Errorf("unable to check isEmailActive() for %s: %v", email, err) + } else if used { + return ErrEmailAlreadyUsed{Email: email} + } + } + if err = addr.updateActivation(sess, activate); err != nil { + return fmt.Errorf("unable to updateActivation() for %d:%s: %w", addr.ID, addr.Email, err) + } + + // Activate/deactivate a user's primary email address and account + if addr.IsPrimary { user := User{ID: userID, Email: email} if has, err := sess.Get(&user); err != nil { return err } else if !has { - return fmt.Errorf("no such user: %d (%s)", userID, email) + return fmt.Errorf("no user with ID: %d and Email: %s", userID, email) } - if user.IsActive == activate { - // Already in the desired state; no action - return nil - } - if activate { - if used, err := isEmailActive(sess, email, userID, 0); err != nil { - return fmt.Errorf("isEmailActive(): %v", err) - } else if used { - return ErrEmailAlreadyUsed{Email: email} + // The user's activation state should be synchronized with the primary email + if user.IsActive != activate { + user.IsActive = activate + if user.Rands, err = GetUserSalt(); err != nil { + return fmt.Errorf("unable to generate salt: %v", err) } - } - user.IsActive = activate - if user.Rands, err = GetUserSalt(); err != nil { - return fmt.Errorf("generate salt: %v", err) - } - if err = updateUserCols(sess, &user, "is_active", "rands"); err != nil { - return fmt.Errorf("updateUserCols(): %v", err) - } - } else { - // Activate/deactivate a user's secondary email address - // First check if there's another user active with the same address - addr := EmailAddress{UID: userID, Email: email} - if has, err := sess.Get(&addr); err != nil { - return err - } else if !has { - return fmt.Errorf("no such email: %d (%s)", userID, email) - } - if addr.IsActivated == activate { - // Already in the desired state; no action - return nil - } - if activate { - if used, err := isEmailActive(sess, email, 0, addr.ID); err != nil { - return fmt.Errorf("isEmailActive(): %v", err) - } else if used { - return ErrEmailAlreadyUsed{Email: email} + if err = updateUserCols(sess, &user, "is_active", "rands"); err != nil { + return fmt.Errorf("unable to updateUserCols() for user ID: %d: %v", userID, err) } } - if err = addr.updateActivation(sess, activate); err != nil { - return fmt.Errorf("updateActivation(): %v", err) - } } + return sess.Commit() } diff --git a/models/user_mail_test.go 
b/models/user_mail_test.go index 8237ce66426f..829a38c18dbb 100644 --- a/models/user_mail_test.go +++ b/models/user_mail_test.go @@ -17,9 +17,9 @@ func TestGetEmailAddresses(t *testing.T) { emails, _ := GetEmailAddresses(int64(1)) if assert.Len(t, emails, 3) { - assert.False(t, emails[0].IsPrimary) + assert.True(t, emails[0].IsPrimary) assert.True(t, emails[2].IsActivated) - assert.True(t, emails[2].IsPrimary) + assert.False(t, emails[2].IsPrimary) } emails, _ = GetEmailAddresses(int64(2)) @@ -45,13 +45,15 @@ func TestAddEmailAddress(t *testing.T) { assert.NoError(t, AddEmailAddress(&EmailAddress{ Email: "user1234567890@example.com", + LowerEmail: "user1234567890@example.com", IsPrimary: true, IsActivated: true, })) // ErrEmailAlreadyUsed err := AddEmailAddress(&EmailAddress{ - Email: "user1234567890@example.com", + Email: "user1234567890@example.com", + LowerEmail: "user1234567890@example.com", }) assert.Error(t, err) assert.True(t, IsErrEmailAlreadyUsed(err)) @@ -64,10 +66,12 @@ func TestAddEmailAddresses(t *testing.T) { emails := make([]*EmailAddress, 2) emails[0] = &EmailAddress{ Email: "user1234@example.com", + LowerEmail: "user1234@example.com", IsActivated: true, } emails[1] = &EmailAddress{ Email: "user5678@example.com", + LowerEmail: "user5678@example.com", IsActivated: true, } assert.NoError(t, AddEmailAddresses(emails)) @@ -82,20 +86,23 @@ func TestDeleteEmailAddress(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) assert.NoError(t, DeleteEmailAddress(&EmailAddress{ - UID: int64(1), - ID: int64(1), - Email: "user11@example.com", + UID: int64(1), + ID: int64(33), + Email: "user1-2@example.com", + LowerEmail: "user1-2@example.com", })) assert.NoError(t, DeleteEmailAddress(&EmailAddress{ - UID: int64(1), - Email: "user12@example.com", + UID: int64(1), + Email: "user1-3@example.com", + LowerEmail: "user1-3@example.com", })) // Email address does not exist err := DeleteEmailAddress(&EmailAddress{ - UID: int64(1), - Email: "user1234567890@example.com", + UID: int64(1), + Email: "user1234567890@example.com", + LowerEmail: "user1234567890@example.com", }) assert.Error(t, err) } @@ -106,13 +113,15 @@ func TestDeleteEmailAddresses(t *testing.T) { // delete multiple email address emails := make([]*EmailAddress, 2) emails[0] = &EmailAddress{ - UID: int64(2), - ID: int64(3), - Email: "user2@example.com", + UID: int64(2), + ID: int64(3), + Email: "user2@example.com", + LowerEmail: "user2@example.com", } emails[1] = &EmailAddress{ - UID: int64(2), - Email: "user21@example.com", + UID: int64(2), + Email: "user2-2@example.com", + LowerEmail: "user2-2@example.com", } assert.NoError(t, DeleteEmailAddresses(emails)) @@ -129,14 +138,14 @@ func TestMakeEmailPrimary(t *testing.T) { } err := MakeEmailPrimary(email) assert.Error(t, err) - assert.Equal(t, ErrEmailNotExist.Error(), err.Error()) + assert.EqualError(t, err, ErrEmailAddressNotExist{email.Email}.Error()) email = &EmailAddress{ Email: "user11@example.com", } err = MakeEmailPrimary(email) assert.Error(t, err) - assert.Equal(t, ErrEmailNotActivated.Error(), err.Error()) + assert.EqualError(t, err, ErrEmailNotActivated.Error()) email = &EmailAddress{ Email: "user9999999@example.com", @@ -168,15 +177,21 @@ func TestActivate(t *testing.T) { emails, _ := GetEmailAddresses(int64(1)) assert.Len(t, emails, 3) assert.True(t, emails[0].IsActivated) + assert.True(t, emails[0].IsPrimary) + assert.False(t, emails[1].IsPrimary) assert.True(t, emails[2].IsActivated) - assert.True(t, emails[2].IsPrimary) + assert.False(t, emails[2].IsPrimary) } func 
TestListEmails(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) // Must find all users and their emails - opts := &SearchEmailOptions{} + opts := &SearchEmailOptions{ + ListOptions: ListOptions{ + PageSize: 10000, + }, + } emails, count, err := SearchEmails(opts) assert.NoError(t, err) assert.NotEqual(t, int64(0), count) @@ -232,6 +247,6 @@ func TestListEmails(t *testing.T) { } emails, count, err = SearchEmails(opts) assert.NoError(t, err) - assert.Equal(t, 5, len(emails)) - assert.True(t, count > int64(len(emails))) + assert.Len(t, emails, 5) + assert.Greater(t, count, int64(len(emails))) } diff --git a/models/user_openid_test.go b/models/user_openid_test.go index 18f84bef760b..d04b072279c4 100644 --- a/models/user_openid_test.go +++ b/models/user_openid_test.go @@ -15,15 +15,15 @@ func TestGetUserOpenIDs(t *testing.T) { oids, err := GetUserOpenIDs(int64(1)) if assert.NoError(t, err) && assert.Len(t, oids, 2) { - assert.Equal(t, oids[0].URI, "https://user1.domain1.tld/") + assert.Equal(t, "https://user1.domain1.tld/", oids[0].URI) assert.False(t, oids[0].Show) - assert.Equal(t, oids[1].URI, "http://user1.domain2.tld/") + assert.Equal(t, "http://user1.domain2.tld/", oids[1].URI) assert.True(t, oids[1].Show) } oids, err = GetUserOpenIDs(int64(2)) if assert.NoError(t, err) && assert.Len(t, oids, 1) { - assert.Equal(t, oids[0].URI, "https://domain1.tld/user2/") + assert.Equal(t, "https://domain1.tld/user2/", oids[0].URI) assert.True(t, oids[0].Show) } } @@ -38,12 +38,12 @@ func TestGetUserByOpenID(t *testing.T) { user, err := GetUserByOpenID("https://user1.domain1.tld") if assert.NoError(t, err) { - assert.Equal(t, user.ID, int64(1)) + assert.Equal(t, int64(1), user.ID) } user, err = GetUserByOpenID("https://domain1.tld/user2/") if assert.NoError(t, err) { - assert.Equal(t, user.ID, int64(2)) + assert.Equal(t, int64(2), user.ID) } } diff --git a/models/user_test.go b/models/user_test.go index 02319e43500b..34c465c58649 100644 --- a/models/user_test.go +++ b/models/user_test.go @@ -11,6 +11,7 @@ import ( "testing" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" @@ -189,6 +190,7 @@ func TestDeleteUser(t *testing.T) { func TestEmailNotificationPreferences(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) + for _, test := range []struct { expected string userID int64 @@ -368,6 +370,8 @@ func TestCreateUser_Issue5882(t *testing.T) { } func TestGetUserIDsByNames(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + // ignore non existing IDs, err := GetUserIDsByNames([]string{"user1", "user2", "none_existing_user"}, true) assert.NoError(t, err) @@ -380,16 +384,18 @@ func TestGetUserIDsByNames(t *testing.T) { } func TestGetMaileableUsersByIDs(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + results, err := GetMaileableUsersByIDs([]int64{1, 4}, false) assert.NoError(t, err) - assert.Equal(t, 1, len(results)) + assert.Len(t, results, 1) if len(results) > 1 { assert.Equal(t, results[0].ID, 1) } results, err = GetMaileableUsersByIDs([]int64{1, 4}, true) assert.NoError(t, err) - assert.Equal(t, 2, len(results)) + assert.Len(t, results, 2) if len(results) > 2 { assert.Equal(t, results[0].ID, 1) assert.Equal(t, results[1].ID, 4) @@ -453,7 +459,7 @@ ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ib if err != nil { continue } - assert.Equal(t, kase.number, len(keys)) + assert.Len(t, keys, kase.number) for _, key := range keys { 
assert.Contains(t, kase.keyContents, key.Content) @@ -463,3 +469,23 @@ ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ib } } } + +func TestUpdateUser(t *testing.T) { + assert.NoError(t, PrepareTestDatabase()) + user := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) + + user.KeepActivityPrivate = true + assert.NoError(t, UpdateUser(user)) + user = AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) + assert.True(t, user.KeepActivityPrivate) + + setting.Service.AllowedUserVisibilityModesSlice = []bool{true, false, false} + user.KeepActivityPrivate = false + user.Visibility = structs.VisibleTypePrivate + assert.Error(t, UpdateUser(user)) + user = AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) + assert.True(t, user.KeepActivityPrivate) + + user.Email = "no mail@mail.org" + assert.Error(t, UpdateUser(user)) +} diff --git a/models/webhook.go b/models/webhook.go index 24510cc6f757..29cfcf6ed4f6 100644 --- a/models/webhook.go +++ b/models/webhook.go @@ -109,6 +109,22 @@ type HookEvent struct { HookEvents `json:"events"` } +// HookType is the type of a webhook +type HookType = string + +// Types of webhooks +const ( + GITEA HookType = "gitea" + GOGS HookType = "gogs" + SLACK HookType = "slack" + DISCORD HookType = "discord" + DINGTALK HookType = "dingtalk" + TELEGRAM HookType = "telegram" + MSTEAMS HookType = "msteams" + FEISHU HookType = "feishu" + MATRIX HookType = "matrix" +) + // HookStatus is the status of a web hook type HookStatus int @@ -126,17 +142,15 @@ type Webhook struct { OrgID int64 `xorm:"INDEX"` IsSystemWebhook bool URL string `xorm:"url TEXT"` - Signature string `xorm:"TEXT"` HTTPMethod string `xorm:"http_method"` ContentType HookContentType Secret string `xorm:"TEXT"` Events string `xorm:"TEXT"` *HookEvent `xorm:"-"` - IsSSL bool `xorm:"is_ssl"` - IsActive bool `xorm:"INDEX"` - Type HookTaskType `xorm:"VARCHAR(16) 'type'"` - Meta string `xorm:"TEXT"` // store hook-specific attributes - LastStatus HookStatus // Last delivery status + IsActive bool `xorm:"INDEX"` + Type HookType `xorm:"VARCHAR(16) 'type'"` + Meta string `xorm:"TEXT"` // store hook-specific attributes + LastStatus HookStatus // Last delivery status CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` @@ -558,22 +572,6 @@ func copyDefaultWebhooksToRepo(e Engine, repoID int64) error { // \___|_ / \____/ \____/|__|_ \ |____| (____ /____ >__|_ \ // \/ \/ \/ \/ \/ -// HookTaskType is the type of an hook task -type HookTaskType = string - -// Types of hook tasks -const ( - GITEA HookTaskType = "gitea" - GOGS HookTaskType = "gogs" - SLACK HookTaskType = "slack" - DISCORD HookTaskType = "discord" - DINGTALK HookTaskType = "dingtalk" - TELEGRAM HookTaskType = "telegram" - MSTEAMS HookTaskType = "msteams" - FEISHU HookTaskType = "feishu" - MATRIX HookTaskType = "matrix" -) - // HookEventType is the type of an hook event type HookEventType string @@ -635,7 +633,9 @@ func (h HookEventType) Event() string { // HookRequest represents hook task request information. type HookRequest struct { - Headers map[string]string `json:"headers"` + URL string `json:"url"` + HTTPMethod string `json:"http_method"` + Headers map[string]string `json:"headers"` } // HookResponse represents hook task response information. 
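// Illustrative sketch (reviewer note, not part of the patch): with type, URL and
// signature fields moved off HookTask, a webhook row is typed via the HookType
// constants declared above. CreateWebhook and ContentTypeJSON are assumptions
// taken from the existing models package and are not shown in this hunk.
func exampleCreateSlackWebhook(repoID int64) error {
	w := &Webhook{
		RepoID:      repoID,
		URL:         "https://hooks.example.com/services/XXX",
		ContentType: ContentTypeJSON, // assumed existing HookContentType constant
		Type:        SLACK,           // one of the HookType constants above
		IsActive:    true,
	}
	return CreateWebhook(w) // assumed existing helper in this package
}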
@@ -651,15 +651,9 @@ type HookTask struct { RepoID int64 `xorm:"INDEX"` HookID int64 UUID string - Typ HookTaskType `xorm:"VARCHAR(16) index"` - URL string `xorm:"TEXT"` - Signature string `xorm:"TEXT"` api.Payloader `xorm:"-"` PayloadContent string `xorm:"TEXT"` - HTTPMethod string `xorm:"http_method"` - ContentType HookContentType EventType HookEventType - IsSSL bool IsDelivered bool Delivered int64 DeliveredString string `xorm:"-"` diff --git a/models/webhook_test.go b/models/webhook_test.go index 88b2d40a3920..cab44a120e3e 100644 --- a/models/webhook_test.go +++ b/models/webhook_test.go @@ -207,8 +207,6 @@ func TestCreateHookTask(t *testing.T) { hookTask := &HookTask{ RepoID: 3, HookID: 3, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, } AssertNotExistsBean(t, hookTask) @@ -233,8 +231,6 @@ func TestCleanupHookTaskTable_PerWebhook_DeletesDelivered(t *testing.T) { hookTask := &HookTask{ RepoID: 3, HookID: 3, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: true, Delivered: time.Now().UnixNano(), @@ -252,8 +248,6 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesUndelivered(t *testing.T) { hookTask := &HookTask{ RepoID: 2, HookID: 4, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: false, } @@ -270,8 +264,6 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesMostRecentTask(t *testing.T) { hookTask := &HookTask{ RepoID: 2, HookID: 4, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: true, Delivered: time.Now().UnixNano(), @@ -289,8 +281,6 @@ func TestCleanupHookTaskTable_OlderThan_DeletesDelivered(t *testing.T) { hookTask := &HookTask{ RepoID: 3, HookID: 3, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: true, Delivered: time.Now().AddDate(0, 0, -8).UnixNano(), @@ -308,8 +298,6 @@ func TestCleanupHookTaskTable_OlderThan_LeavesUndelivered(t *testing.T) { hookTask := &HookTask{ RepoID: 2, HookID: 4, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: false, } @@ -326,8 +314,6 @@ func TestCleanupHookTaskTable_OlderThan_LeavesTaskEarlierThanAgeToDelete(t *test hookTask := &HookTask{ RepoID: 2, HookID: 4, - Typ: GITEA, - URL: "http://www.example.com/unit_test", Payloader: &api.PushPayload{}, IsDelivered: true, Delivered: time.Now().AddDate(0, 0, -6).UnixNano(), diff --git a/modules/auth/ldap/ldap.go b/modules/auth/ldap/ldap.go index 6c557de018c4..91ad33a60f3a 100644 --- a/modules/auth/ldap/ldap.go +++ b/modules/auth/ldap/ldap.go @@ -35,6 +35,7 @@ type Source struct { SecurityProtocol SecurityProtocol SkipVerify bool BindDN string // DN to bind with + BindPasswordEncrypt string // Encrypted Bind BN password BindPassword string // Bind DN password UserBase string // Base search path for users UserDN string // Template for the DN of the user for simple auth diff --git a/modules/auth/oauth2/jwtsigningkey.go b/modules/auth/oauth2/jwtsigningkey.go new file mode 100644 index 000000000000..75e62a7c4303 --- /dev/null +++ b/modules/auth/oauth2/jwtsigningkey.go @@ -0,0 +1,378 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package oauth2 + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "encoding/pem" + "fmt" + "io/ioutil" + "math/big" + "os" + "path/filepath" + "strings" + + "code.gitea.io/gitea/modules/generate" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + + "github.com/dgrijalva/jwt-go" + ini "gopkg.in/ini.v1" +) + +// ErrInvalidAlgorithmType represents an invalid algorithm error. +type ErrInvalidAlgorithmType struct { + Algorightm string +} + +func (err ErrInvalidAlgorithmType) Error() string { + return fmt.Sprintf("JWT signing algorithm is not supported: %s", err.Algorightm) +} + +// JWTSigningKey represents a algorithm/key pair to sign JWTs +type JWTSigningKey interface { + IsSymmetric() bool + SigningMethod() jwt.SigningMethod + SignKey() interface{} + VerifyKey() interface{} + ToJWK() (map[string]string, error) + PreProcessToken(*jwt.Token) +} + +type hmacSigningKey struct { + signingMethod jwt.SigningMethod + secret []byte +} + +func (key hmacSigningKey) IsSymmetric() bool { + return true +} + +func (key hmacSigningKey) SigningMethod() jwt.SigningMethod { + return key.signingMethod +} + +func (key hmacSigningKey) SignKey() interface{} { + return key.secret +} + +func (key hmacSigningKey) VerifyKey() interface{} { + return key.secret +} + +func (key hmacSigningKey) ToJWK() (map[string]string, error) { + return map[string]string{ + "kty": "oct", + "alg": key.SigningMethod().Alg(), + }, nil +} + +func (key hmacSigningKey) PreProcessToken(*jwt.Token) {} + +type rsaSingingKey struct { + signingMethod jwt.SigningMethod + key *rsa.PrivateKey + id string +} + +func newRSASingingKey(signingMethod jwt.SigningMethod, key *rsa.PrivateKey) (rsaSingingKey, error) { + kid, err := createPublicKeyFingerprint(key.Public().(*rsa.PublicKey)) + if err != nil { + return rsaSingingKey{}, err + } + + return rsaSingingKey{ + signingMethod, + key, + base64.RawURLEncoding.EncodeToString(kid), + }, nil +} + +func (key rsaSingingKey) IsSymmetric() bool { + return false +} + +func (key rsaSingingKey) SigningMethod() jwt.SigningMethod { + return key.signingMethod +} + +func (key rsaSingingKey) SignKey() interface{} { + return key.key +} + +func (key rsaSingingKey) VerifyKey() interface{} { + return key.key.Public() +} + +func (key rsaSingingKey) ToJWK() (map[string]string, error) { + pubKey := key.key.Public().(*rsa.PublicKey) + + return map[string]string{ + "kty": "RSA", + "alg": key.SigningMethod().Alg(), + "kid": key.id, + "e": base64.RawURLEncoding.EncodeToString(big.NewInt(int64(pubKey.E)).Bytes()), + "n": base64.RawURLEncoding.EncodeToString(pubKey.N.Bytes()), + }, nil +} + +func (key rsaSingingKey) PreProcessToken(token *jwt.Token) { + token.Header["kid"] = key.id +} + +type ecdsaSingingKey struct { + signingMethod jwt.SigningMethod + key *ecdsa.PrivateKey + id string +} + +func newECDSASingingKey(signingMethod jwt.SigningMethod, key *ecdsa.PrivateKey) (ecdsaSingingKey, error) { + kid, err := createPublicKeyFingerprint(key.Public().(*ecdsa.PublicKey)) + if err != nil { + return ecdsaSingingKey{}, err + } + + return ecdsaSingingKey{ + signingMethod, + key, + base64.RawURLEncoding.EncodeToString(kid), + }, nil +} + +func (key ecdsaSingingKey) IsSymmetric() bool { + return false +} + +func (key ecdsaSingingKey) SigningMethod() jwt.SigningMethod { + return key.signingMethod +} + +func (key ecdsaSingingKey) SignKey() interface{} { + return key.key +} + +func (key 
ecdsaSingingKey) VerifyKey() interface{} { + return key.key.Public() +} + +func (key ecdsaSingingKey) ToJWK() (map[string]string, error) { + pubKey := key.key.Public().(*ecdsa.PublicKey) + + return map[string]string{ + "kty": "EC", + "alg": key.SigningMethod().Alg(), + "kid": key.id, + "crv": pubKey.Params().Name, + "x": base64.RawURLEncoding.EncodeToString(pubKey.X.Bytes()), + "y": base64.RawURLEncoding.EncodeToString(pubKey.Y.Bytes()), + }, nil +} + +func (key ecdsaSingingKey) PreProcessToken(token *jwt.Token) { + token.Header["kid"] = key.id +} + +// createPublicKeyFingerprint creates a fingerprint of the given key. +// The fingerprint is the sha256 sum of the PKIX structure of the key. +func createPublicKeyFingerprint(key interface{}) ([]byte, error) { + bytes, err := x509.MarshalPKIXPublicKey(key) + if err != nil { + return nil, err + } + + checksum := sha256.Sum256(bytes) + + return checksum[:], nil +} + +// CreateJWTSingingKey creates a signing key from an algorithm / key pair. +func CreateJWTSingingKey(algorithm string, key interface{}) (JWTSigningKey, error) { + var signingMethod jwt.SigningMethod + switch algorithm { + case "HS256": + signingMethod = jwt.SigningMethodHS256 + case "HS384": + signingMethod = jwt.SigningMethodHS384 + case "HS512": + signingMethod = jwt.SigningMethodHS512 + + case "RS256": + signingMethod = jwt.SigningMethodRS256 + case "RS384": + signingMethod = jwt.SigningMethodRS384 + case "RS512": + signingMethod = jwt.SigningMethodRS512 + + case "ES256": + signingMethod = jwt.SigningMethodES256 + case "ES384": + signingMethod = jwt.SigningMethodES384 + case "ES512": + signingMethod = jwt.SigningMethodES512 + default: + return nil, ErrInvalidAlgorithmType{algorithm} + } + + switch signingMethod.(type) { + case *jwt.SigningMethodECDSA: + privateKey, ok := key.(*ecdsa.PrivateKey) + if !ok { + return nil, jwt.ErrInvalidKeyType + } + return newECDSASingingKey(signingMethod, privateKey) + case *jwt.SigningMethodRSA: + privateKey, ok := key.(*rsa.PrivateKey) + if !ok { + return nil, jwt.ErrInvalidKeyType + } + return newRSASingingKey(signingMethod, privateKey) + default: + secret, ok := key.([]byte) + if !ok { + return nil, jwt.ErrInvalidKeyType + } + return hmacSigningKey{signingMethod, secret}, nil + } +} + +// DefaultSigningKey is the default signing key for JWTs. +var DefaultSigningKey JWTSigningKey + +// InitSigningKey creates the default signing key from settings or creates a random key. +func InitSigningKey() error { + var err error + var key interface{} + + switch setting.OAuth2.JWTSigningAlgorithm { + case "HS256": + fallthrough + case "HS384": + fallthrough + case "HS512": + key, err = loadOrCreateSymmetricKey() + + case "RS256": + fallthrough + case "RS384": + fallthrough + case "RS512": + fallthrough + case "ES256": + fallthrough + case "ES384": + fallthrough + case "ES512": + key, err = loadOrCreateAsymmetricKey() + + default: + return ErrInvalidAlgorithmType{setting.OAuth2.JWTSigningAlgorithm} + } + + if err != nil { + return fmt.Errorf("Error while loading or creating symmetric key: %v", err) + } + + signingKey, err := CreateJWTSingingKey(setting.OAuth2.JWTSigningAlgorithm, key) + if err != nil { + return err + } + + DefaultSigningKey = signingKey + + return nil +} + +// loadOrCreateSymmetricKey checks if the configured secret is valid. +// If it is not valid a new secret is created and saved in the configuration file. 
+func loadOrCreateSymmetricKey() (interface{}, error) { + key := make([]byte, 32) + n, err := base64.RawURLEncoding.Decode(key, []byte(setting.OAuth2.JWTSecretBase64)) + if err != nil || n != 32 { + key, err = generate.NewJwtSecret() + if err != nil { + log.Fatal("error generating JWT secret: %v", err) + return nil, err + } + + setting.CreateOrAppendToCustomConf(func(cfg *ini.File) { + secretBase64 := base64.RawURLEncoding.EncodeToString(key) + cfg.Section("oauth2").Key("JWT_SECRET").SetValue(secretBase64) + }) + } + + return key, nil +} + +// loadOrCreateAsymmetricKey checks if the configured private key exists. +// If it does not exist a new random key gets generated and saved on the configured path. +func loadOrCreateAsymmetricKey() (interface{}, error) { + keyPath := setting.OAuth2.JWTSigningPrivateKeyFile + + isExist, err := util.IsExist(keyPath) + if err != nil { + log.Fatal("Unable to check if %s exists. Error: %v", keyPath, err) + } + if !isExist { + err := func() error { + key, err := func() (interface{}, error) { + if strings.HasPrefix(setting.OAuth2.JWTSigningAlgorithm, "RS") { + return rsa.GenerateKey(rand.Reader, 4096) + } + return ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + }() + if err != nil { + return err + } + + bytes, err := x509.MarshalPKCS8PrivateKey(key) + if err != nil { + return err + } + + privateKeyPEM := &pem.Block{Type: "PRIVATE KEY", Bytes: bytes} + + if err := os.MkdirAll(filepath.Dir(keyPath), os.ModePerm); err != nil { + return err + } + + f, err := os.OpenFile(keyPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) + if err != nil { + return err + } + defer func() { + if err = f.Close(); err != nil { + log.Error("Close: %v", err) + } + }() + + return pem.Encode(f, privateKeyPEM) + }() + if err != nil { + log.Fatal("Error generating private key: %v", err) + return nil, err + } + } + + bytes, err := ioutil.ReadFile(keyPath) + if err != nil { + return nil, err + } + + block, _ := pem.Decode(bytes) + if block == nil { + return nil, fmt.Errorf("no valid PEM data found in %s", keyPath) + } else if block.Type != "PRIVATE KEY" { + return nil, fmt.Errorf("expected PRIVATE KEY, got %s in %s", block.Type, keyPath) + } + + return x509.ParsePKCS8PrivateKey(block.Bytes) +} diff --git a/modules/auth/pam/pam_test.go b/modules/auth/pam/pam_test.go index eafc9bc3dba4..fa16ff0fe78c 100644 --- a/modules/auth/pam/pam_test.go +++ b/modules/auth/pam/pam_test.go @@ -15,6 +15,6 @@ import ( func TestPamAuth(t *testing.T) { result, err := Auth("gitea", "user1", "false-pwd") assert.Error(t, err) - assert.EqualValues(t, "Authentication failure", err.Error()) + assert.EqualError(t, err, "Authentication failure") assert.Len(t, result, 0) } diff --git a/modules/auth/sso/session.go b/modules/auth/sso/session.go deleted file mode 100644 index 7a546577d86e..000000000000 --- a/modules/auth/sso/session.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package sso - -import ( - "net/http" - - "code.gitea.io/gitea/models" -) - -// Ensure the struct implements the interface. -var ( - _ SingleSignOn = &Session{} -) - -// Session checks if there is a user uid stored in the session and returns the user -// object for that uid. 
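// Illustrative sketch (reviewer note, not part of the patch): signing a token
// with the JWTSigningKey interface added in jwtsigningkey.go above, using the
// github.com/dgrijalva/jwt-go API that file already imports. It assumes
// InitSigningKey() has populated DefaultSigningKey.
func exampleSignToken(subject string) (string, error) {
	key := DefaultSigningKey
	token := jwt.NewWithClaims(key.SigningMethod(), jwt.MapClaims{"sub": subject})
	key.PreProcessToken(token) // sets the "kid" header for RSA/ECDSA keys; no-op for HMAC
	return token.SignedString(key.SignKey())
}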
-type Session struct { -} - -// Init does nothing as the Session implementation does not need to allocate any resources -func (s *Session) Init() error { - return nil -} - -// Free does nothing as the Session implementation does not have to release any resources -func (s *Session) Free() error { - return nil -} - -// IsEnabled returns true as this plugin is enabled by default and its not possible to disable -// it from settings. -func (s *Session) IsEnabled() bool { - return true -} - -// VerifyAuthData checks if there is a user uid stored in the session and returns the user -// object for that uid. -// Returns nil if there is no user uid stored in the session. -func (s *Session) VerifyAuthData(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { - user := SessionUser(sess) - if user != nil { - return user - } - return nil -} diff --git a/modules/auth/sso/user.go b/modules/auth/sso/user.go deleted file mode 100644 index 48eebb1e915f..000000000000 --- a/modules/auth/sso/user.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package sso - -import ( - "net/http" - - "code.gitea.io/gitea/models" -) - -// SignedInUser returns the user object of signed user. -// It returns a bool value to indicate whether user uses basic auth or not. -func SignedInUser(req *http.Request, w http.ResponseWriter, ds DataStore, sess SessionStore) (*models.User, bool) { - if !models.HasEngine { - return nil, false - } - - // Try to sign in with each of the enabled plugins - for _, ssoMethod := range Methods() { - if !ssoMethod.IsEnabled() { - continue - } - user := ssoMethod.VerifyAuthData(req, w, ds, sess) - if user != nil { - _, isBasic := ssoMethod.(*Basic) - return user, isBasic - } - } - - return nil, false -} diff --git a/modules/avatar/avatar.go b/modules/avatar/avatar.go index 44b56c26ce5d..5411a90796f5 100644 --- a/modules/avatar/avatar.go +++ b/modules/avatar/avatar.go @@ -10,12 +10,12 @@ import ( "image" "image/color/palette" - // Enable PNG support: - _ "image/png" - "math/rand" - "time" + _ "image/gif" // for processing gif images + _ "image/jpeg" // for processing jpeg images + _ "image/png" // for processing png images "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "github.com/issue9/identicon" "github.com/nfnt/resize" @@ -29,8 +29,11 @@ const AvatarSize = 290 // in custom size (height and width). 
func RandomImageSize(size int, data []byte) (image.Image, error) { randExtent := len(palette.WebSafe) - 32 - rand.Seed(time.Now().UnixNano()) - colorIndex := rand.Intn(randExtent) + integer, err := util.RandomInt(int64(randExtent)) + if err != nil { + return nil, fmt.Errorf("util.RandomInt: %v", err) + } + colorIndex := int(integer) backColorIndex := colorIndex - 1 if backColorIndex < 0 { backColorIndex = randExtent - 1 diff --git a/modules/avatar/avatar_test.go b/modules/avatar/avatar_test.go index 85356056529a..f48266c8587e 100644 --- a/modules/avatar/avatar_test.go +++ b/modules/avatar/avatar_test.go @@ -13,12 +13,17 @@ import ( "github.com/stretchr/testify/assert" ) -func Test_RandomImage(t *testing.T) { - _, err := RandomImage([]byte("gogs@local")) +func Test_RandomImageSize(t *testing.T) { + _, err := RandomImageSize(0, []byte("gitea@local")) + assert.Error(t, err) + + _, err = RandomImageSize(64, []byte("gitea@local")) assert.NoError(t, err) +} - _, err = RandomImageSize(0, []byte("gogs@local")) - assert.Error(t, err) +func Test_RandomImage(t *testing.T) { + _, err := RandomImage([]byte("gitea@local")) + assert.NoError(t, err) } func Test_PrepareWithPNG(t *testing.T) { diff --git a/modules/base/tool.go b/modules/base/tool.go index d721d47e9d77..775fd709cfff 100644 --- a/modules/base/tool.go +++ b/modules/base/tool.go @@ -12,15 +12,14 @@ import ( "encoding/hex" "errors" "fmt" - "net/http" "os" "path/filepath" - "regexp" "runtime" "strconv" "strings" "time" "unicode" + "unicode/utf8" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" @@ -29,15 +28,6 @@ import ( "github.com/dustin/go-humanize" ) -// Use at most this many bytes to determine Content Type. -const sniffLen = 512 - -// SVGMimeType MIME type of SVG images. -const SVGMimeType = "image/svg+xml" - -var svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(||>))\s*)*\/]`) -var svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(||>))\s*)*\/]`) - // EncodeMD5 encodes string to md5 hex value. func EncodeMD5(str string) string { m := md5.New() @@ -213,19 +203,19 @@ func EllipsisString(str string, length int) string { if length <= 3 { return "..." } - if len(str) <= length { + if utf8.RuneCountInString(str) <= length { return str } - return str[:length-3] + "..." + return string([]rune(str)[:length-3]) + "..." } // TruncateString returns a truncated string with given limit, // it returns input string if length is not reached limit. func TruncateString(str string, limit int) string { - if len(str) < limit { + if utf8.RuneCountInString(str) < limit { return str } - return str[:limit] + return string([]rune(str)[:limit]) } // StringsToInt64s converts a slice of string to a slice of int64. @@ -275,63 +265,6 @@ func IsLetter(ch rune) bool { return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) } -// DetectContentType extends http.DetectContentType with more content types. -func DetectContentType(data []byte) string { - ct := http.DetectContentType(data) - - if len(data) > sniffLen { - data = data[:sniffLen] - } - - if setting.UI.SVG.Enabled && - ((strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")) && svgTagRegex.Match(data) || - strings.Contains(ct, "text/xml") && svgTagInXMLRegex.Match(data)) { - - // SVG is unsupported. https://github.com/golang/go/issues/15888 - return SVGMimeType - } - return ct -} - -// IsRepresentableAsText returns true if file content can be represented as -// plain text or is empty. 
-func IsRepresentableAsText(data []byte) bool { - return IsTextFile(data) || IsSVGImageFile(data) -} - -// IsTextFile returns true if file content format is plain text or empty. -func IsTextFile(data []byte) bool { - if len(data) == 0 { - return true - } - return strings.Contains(DetectContentType(data), "text/") -} - -// IsImageFile detects if data is an image format -func IsImageFile(data []byte) bool { - return strings.Contains(DetectContentType(data), "image/") -} - -// IsSVGImageFile detects if data is an SVG image format -func IsSVGImageFile(data []byte) bool { - return strings.Contains(DetectContentType(data), SVGMimeType) -} - -// IsPDFFile detects if data is a pdf format -func IsPDFFile(data []byte) bool { - return strings.Contains(DetectContentType(data), "application/pdf") -} - -// IsVideoFile detects if data is an video format -func IsVideoFile(data []byte) bool { - return strings.Contains(DetectContentType(data), "video/") -} - -// IsAudioFile detects if data is an video format -func IsAudioFile(data []byte) bool { - return strings.Contains(DetectContentType(data), "audio/") -} - // EntryIcon returns the octicon class for displaying files/directories func EntryIcon(entry *git.TreeEntry) string { switch { diff --git a/modules/base/tool_test.go b/modules/base/tool_test.go index b6baeb8c3ce7..5280827e8ae3 100644 --- a/modules/base/tool_test.go +++ b/modules/base/tool_test.go @@ -5,7 +5,6 @@ package base import ( - "encoding/base64" "os" "testing" "time" @@ -170,6 +169,10 @@ func TestEllipsisString(t *testing.T) { assert.Equal(t, "fo...", EllipsisString("foobar", 5)) assert.Equal(t, "foobar", EllipsisString("foobar", 6)) assert.Equal(t, "foobar", EllipsisString("foobar", 10)) + assert.Equal(t, "测...", EllipsisString("测试文本一二三四", 4)) + assert.Equal(t, "测试...", EllipsisString("测试文本一二三四", 5)) + assert.Equal(t, "测试文...", EllipsisString("测试文本一二三四", 6)) + assert.Equal(t, "测试文本一二三四", EllipsisString("测试文本一二三四", 10)) } func TestTruncateString(t *testing.T) { @@ -181,6 +184,10 @@ func TestTruncateString(t *testing.T) { assert.Equal(t, "fooba", TruncateString("foobar", 5)) assert.Equal(t, "foobar", TruncateString("foobar", 6)) assert.Equal(t, "foobar", TruncateString("foobar", 7)) + assert.Equal(t, "测试文本", TruncateString("测试文本一二三四", 4)) + assert.Equal(t, "测试文本一", TruncateString("测试文本一二三四", 5)) + assert.Equal(t, "测试文本一二", TruncateString("测试文本一二三四", 6)) + assert.Equal(t, "测试文本一二三", TruncateString("测试文本一二三四", 7)) } func TestStringsToInt64s(t *testing.T) { @@ -216,9 +223,9 @@ func TestInt64sToMap(t *testing.T) { func TestInt64sContains(t *testing.T) { assert.Equal(t, map[int64]bool{}, Int64sToMap([]int64{})) - assert.Equal(t, true, Int64sContains([]int64{6, 44324, 4324, 32, 1, 2323}, 1)) - assert.Equal(t, true, Int64sContains([]int64{2323}, 2323)) - assert.Equal(t, false, Int64sContains([]int64{6, 44324, 4324, 32, 1, 2323}, 232)) + assert.True(t, Int64sContains([]int64{6, 44324, 4324, 32, 1, 2323}, 1)) + assert.True(t, Int64sContains([]int64{2323}, 2323)) + assert.False(t, Int64sContains([]int64{6, 44324, 4324, 32, 1, 2323}, 232)) } func TestIsLetter(t *testing.T) { @@ -238,102 +245,11 @@ func TestIsLetter(t *testing.T) { assert.False(t, IsLetter(0x93)) } -func TestDetectContentTypeLongerThanSniffLen(t *testing.T) { - // Pre-condition: Shorter than sniffLen detects SVG. - assert.Equal(t, "image/svg+xml", DetectContentType([]byte(``))) - // Longer than sniffLen detects something else. 
- assert.Equal(t, "text/plain; charset=utf-8", DetectContentType([]byte(``))) -} - -// IsRepresentableAsText - -func TestIsTextFile(t *testing.T) { - assert.True(t, IsTextFile([]byte{})) - assert.True(t, IsTextFile([]byte("lorem ipsum"))) -} - -func TestIsImageFile(t *testing.T) { - png, _ := base64.StdEncoding.DecodeString("iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAG0lEQVQYlWN4+vTpf3SMDTAMBYXYBLFpHgoKAeiOf0SGE9kbAAAAAElFTkSuQmCC") - assert.True(t, IsImageFile(png)) - assert.False(t, IsImageFile([]byte("plain text"))) -} - -func TestIsSVGImageFile(t *testing.T) { - assert.True(t, IsSVGImageFile([]byte(""))) - assert.True(t, IsSVGImageFile([]byte(" "))) - assert.True(t, IsSVGImageFile([]byte(``))) - assert.True(t, IsSVGImageFile([]byte(""))) - assert.True(t, IsSVGImageFile([]byte(``))) - assert.True(t, IsSVGImageFile([]byte(` - `))) - assert.True(t, IsSVGImageFile([]byte(` - - `))) - assert.True(t, IsSVGImageFile([]byte(` - `))) - assert.True(t, IsSVGImageFile([]byte(` - `))) - assert.True(t, IsSVGImageFile([]byte(` - - `))) - assert.True(t, IsSVGImageFile([]byte(` - - - `))) - assert.True(t, IsSVGImageFile([]byte(` - - `))) - assert.True(t, IsSVGImageFile([]byte(` - - - `))) - assert.False(t, IsSVGImageFile([]byte{})) - assert.False(t, IsSVGImageFile([]byte("svg"))) - assert.False(t, IsSVGImageFile([]byte(""))) - assert.False(t, IsSVGImageFile([]byte("text"))) - assert.False(t, IsSVGImageFile([]byte(""))) - assert.False(t, IsSVGImageFile([]byte(``))) - assert.False(t, IsSVGImageFile([]byte(` - `))) - assert.False(t, IsSVGImageFile([]byte(` - - `))) -} - -func TestIsPDFFile(t *testing.T) { - pdf, _ := base64.StdEncoding.DecodeString("JVBERi0xLjYKJcOkw7zDtsOfCjIgMCBvYmoKPDwvTGVuZ3RoIDMgMCBSL0ZpbHRlci9GbGF0ZURlY29kZT4+CnN0cmVhbQp4nF3NPwsCMQwF8D2f4s2CNYk1baF0EHRwOwg4iJt/NsFb/PpevUE4Mjwe") - assert.True(t, IsPDFFile(pdf)) - assert.False(t, IsPDFFile([]byte("plain text"))) -} - -func TestIsVideoFile(t *testing.T) { - mp4, _ := base64.StdEncoding.DecodeString("AAAAGGZ0eXBtcDQyAAAAAGlzb21tcDQyAAEI721vb3YAAABsbXZoZAAAAADaBlwX2gZcFwAAA+gA") - assert.True(t, IsVideoFile(mp4)) - assert.False(t, IsVideoFile([]byte("plain text"))) -} - -func TestIsAudioFile(t *testing.T) { - mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl") - assert.True(t, IsAudioFile(mp3)) - assert.False(t, IsAudioFile([]byte("plain text"))) -} - // TODO: Test EntryIcon func TestSetupGiteaRoot(t *testing.T) { _ = os.Setenv("GITEA_ROOT", "test") - assert.EqualValues(t, "test", SetupGiteaRoot()) + assert.Equal(t, "test", SetupGiteaRoot()) _ = os.Setenv("GITEA_ROOT", "") assert.NotEqual(t, "test", SetupGiteaRoot()) } diff --git a/modules/cache/cache_twoqueue.go b/modules/cache/cache_twoqueue.go new file mode 100644 index 000000000000..7d8fa7c93416 --- /dev/null +++ b/modules/cache/cache_twoqueue.go @@ -0,0 +1,204 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "strconv" + "sync" + "time" + + mc "gitea.com/go-chi/cache" + lru "github.com/hashicorp/golang-lru" + jsoniter "github.com/json-iterator/go" +) + +// TwoQueueCache represents a LRU 2Q cache adapter implementation +type TwoQueueCache struct { + lock sync.Mutex + cache *lru.TwoQueueCache + interval int +} + +// TwoQueueCacheConfig describes the configuration for TwoQueueCache +type TwoQueueCacheConfig struct { + Size int `ini:"SIZE" json:"size"` + RecentRatio float64 `ini:"RECENT_RATIO" json:"recent_ratio"` + GhostRatio float64 `ini:"GHOST_RATIO" json:"ghost_ratio"` +} + +// MemoryItem represents a memory cache item. +type MemoryItem struct { + Val interface{} + Created int64 + Timeout int64 +} + +func (item *MemoryItem) hasExpired() bool { + return item.Timeout > 0 && + (time.Now().Unix()-item.Created) >= item.Timeout +} + +var _ mc.Cache = &TwoQueueCache{} + +// Put puts value into cache with key and expire time. +func (c *TwoQueueCache) Put(key string, val interface{}, timeout int64) error { + item := &MemoryItem{ + Val: val, + Created: time.Now().Unix(), + Timeout: timeout, + } + c.lock.Lock() + defer c.lock.Unlock() + c.cache.Add(key, item) + return nil +} + +// Get gets cached value by given key. +func (c *TwoQueueCache) Get(key string) interface{} { + c.lock.Lock() + defer c.lock.Unlock() + cached, ok := c.cache.Get(key) + if !ok { + return nil + } + item, ok := cached.(*MemoryItem) + + if !ok || item.hasExpired() { + c.cache.Remove(key) + return nil + } + + return item.Val +} + +// Delete deletes cached value by given key. +func (c *TwoQueueCache) Delete(key string) error { + c.lock.Lock() + defer c.lock.Unlock() + c.cache.Remove(key) + return nil +} + +// Incr increases cached int-type value by given key as a counter. +func (c *TwoQueueCache) Incr(key string) error { + c.lock.Lock() + defer c.lock.Unlock() + cached, ok := c.cache.Get(key) + if !ok { + return nil + } + item, ok := cached.(*MemoryItem) + + if !ok || item.hasExpired() { + c.cache.Remove(key) + return nil + } + + var err error + item.Val, err = mc.Incr(item.Val) + return err +} + +// Decr decreases cached int-type value by given key as a counter. +func (c *TwoQueueCache) Decr(key string) error { + c.lock.Lock() + defer c.lock.Unlock() + cached, ok := c.cache.Get(key) + if !ok { + return nil + } + item, ok := cached.(*MemoryItem) + + if !ok || item.hasExpired() { + c.cache.Remove(key) + return nil + } + + var err error + item.Val, err = mc.Decr(item.Val) + return err +} + +// IsExist returns true if cached value exists. +func (c *TwoQueueCache) IsExist(key string) bool { + c.lock.Lock() + defer c.lock.Unlock() + cached, ok := c.cache.Peek(key) + if !ok { + return false + } + item, ok := cached.(*MemoryItem) + if !ok || item.hasExpired() { + c.cache.Remove(key) + return false + } + + return true +} + +// Flush deletes all cached data. +func (c *TwoQueueCache) Flush() error { + c.lock.Lock() + defer c.lock.Unlock() + c.cache.Purge() + return nil +} + +func (c *TwoQueueCache) checkAndInvalidate(key interface{}) { + c.lock.Lock() + defer c.lock.Unlock() + cached, ok := c.cache.Peek(key) + if !ok { + return + } + item, ok := cached.(*MemoryItem) + if !ok || item.hasExpired() { + c.cache.Remove(item) + } +} + +func (c *TwoQueueCache) startGC() { + if c.interval < 0 { + return + } + for _, key := range c.cache.Keys() { + c.checkAndInvalidate(key) + } + time.AfterFunc(time.Duration(c.interval)*time.Second, c.startGC) +} + +// StartAndGC starts GC routine based on config string settings. 
+func (c *TwoQueueCache) StartAndGC(opts mc.Options) error { + var err error + size := 50000 + if opts.AdapterConfig != "" { + size, err = strconv.Atoi(opts.AdapterConfig) + } + if err != nil { + json := jsoniter.ConfigCompatibleWithStandardLibrary + if !json.Valid([]byte(opts.AdapterConfig)) { + return err + } + + cfg := &TwoQueueCacheConfig{ + Size: 50000, + RecentRatio: lru.Default2QRecentRatio, + GhostRatio: lru.Default2QGhostEntries, + } + _ = json.Unmarshal([]byte(opts.AdapterConfig), cfg) + c.cache, err = lru.New2QParams(cfg.Size, cfg.RecentRatio, cfg.GhostRatio) + } else { + c.cache, err = lru.New2Q(size) + } + c.interval = opts.Interval + if c.interval > 0 { + go c.startGC() + } + return err +} + +func init() { + mc.Register("twoqueue", &TwoQueueCache{}) +} diff --git a/modules/context/api.go b/modules/context/api.go index cbd90c50e4b8..506824674522 100644 --- a/modules/context/api.go +++ b/modules/context/api.go @@ -14,11 +14,11 @@ import ( "strings" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/auth/sso" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/services/auth" "gitea.com/go-chi/session" ) @@ -217,6 +217,26 @@ func (ctx *APIContext) CheckForOTP() { } } +// APIAuth converts auth.Auth as a middleware +func APIAuth(authMethod auth.Auth) func(*APIContext) { + return func(ctx *APIContext) { + // Get user from session if logged in. + ctx.User = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + if ctx.User != nil { + ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == new(auth.Basic).Name() + ctx.IsSigned = true + ctx.Data["IsSigned"] = ctx.IsSigned + ctx.Data["SignedUser"] = ctx.User + ctx.Data["SignedUserID"] = ctx.User.ID + ctx.Data["SignedUserName"] = ctx.User.Name + ctx.Data["IsAdmin"] = ctx.User.IsAdmin + } else { + ctx.Data["SignedUserID"] = int64(0) + ctx.Data["SignedUserName"] = "" + } + } +} + // APIContexter returns apicontext as middleware func APIContexter() func(http.Handler) http.Handler { var csrfOpts = getCsrfOpts() @@ -250,20 +270,6 @@ func APIContexter() func(http.Handler) http.Handler { } } - // Get user from session if logged in. 
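// Illustrative sketch (reviewer note, not part of the patch): the two
// ADAPTER_CONFIG forms the new TwoQueueCache.StartAndGC (cache_twoqueue.go
// above) accepts -- a bare LRU size, or a JSON object matching
// TwoQueueCacheConfig. mc is gitea.com/go-chi/cache as imported in that file;
// only the AdapterConfig and Interval options are exercised here.
func exampleTwoQueueConfigs() error {
	plain := &TwoQueueCache{}
	if err := plain.StartAndGC(mc.Options{AdapterConfig: "50000", Interval: 60}); err != nil {
		return err
	}
	tuned := &TwoQueueCache{}
	return tuned.StartAndGC(mc.Options{
		AdapterConfig: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}`,
		Interval:      60,
	})
}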
- ctx.User, ctx.IsBasicAuth = sso.SignedInUser(ctx.Req, ctx.Resp, &ctx, ctx.Session) - if ctx.User != nil { - ctx.IsSigned = true - ctx.Data["IsSigned"] = ctx.IsSigned - ctx.Data["SignedUser"] = ctx.User - ctx.Data["SignedUserID"] = ctx.User.ID - ctx.Data["SignedUserName"] = ctx.User.Name - ctx.Data["IsAdmin"] = ctx.User.IsAdmin - } else { - ctx.Data["SignedUserID"] = int64(0) - ctx.Data["SignedUserName"] = "" - } - ctx.Resp.Header().Set(`X-Frame-Options`, `SAMEORIGIN`) ctx.Data["CsrfToken"] = html.EscapeString(ctx.csrf.GetToken()) diff --git a/modules/context/context.go b/modules/context/context.go index 2c27378cb075..c6829d5c26a2 100644 --- a/modules/context/context.go +++ b/modules/context/context.go @@ -21,14 +21,15 @@ import ( "time" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/auth/sso" "code.gitea.io/gitea/modules/base" mc "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/services/auth" "gitea.com/go-chi/cache" "gitea.com/go-chi/session" @@ -319,6 +320,11 @@ func (ctx *Context) QueryBool(key string, defaults ...bool) bool { return (*Forms)(ctx.Req).MustBool(key, defaults...) } +// QueryOptionalBool returns request form as OptionalBool with default +func (ctx *Context) QueryOptionalBool(key string, defaults ...util.OptionalBool) util.OptionalBool { + return (*Forms)(ctx.Req).MustOptionalBool(key, defaults...) +} + // HandleText handles HTTP status code func (ctx *Context) HandleText(status int, title string) { if (status/100 == 4) || (status/100 == 5) { @@ -374,6 +380,21 @@ func (ctx *Context) ServeFile(file string, names ...string) { http.ServeFile(ctx.Resp, ctx.Req, file) } +// ServeStream serves file via io stream +func (ctx *Context) ServeStream(rd io.Reader, name string) { + ctx.Resp.Header().Set("Content-Description", "File Transfer") + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name) + ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary") + ctx.Resp.Header().Set("Expires", "0") + ctx.Resp.Header().Set("Cache-Control", "must-revalidate") + ctx.Resp.Header().Set("Pragma", "public") + _, err := io.Copy(ctx.Resp, rd) + if err != nil { + ctx.ServerError("Download file failed", err) + } +} + // Error returned an error to web browser func (ctx *Context) Error(status int, contents ...string) { var v = http.StatusText(status) @@ -509,7 +530,7 @@ func (ctx *Context) ParamsInt64(p string) int64 { // SetParams set params into routes func (ctx *Context) SetParams(k, v string) { - chiCtx := chi.RouteContext(ctx.Req.Context()) + chiCtx := chi.RouteContext(ctx) chiCtx.URLParams.Add(strings.TrimPrefix(k, ":"), url.PathEscape(v)) } @@ -528,6 +549,26 @@ func (ctx *Context) Status(status int) { ctx.Resp.WriteHeader(status) } +// Deadline is part of the interface for context.Context and we pass this to the request context +func (ctx *Context) Deadline() (deadline time.Time, ok bool) { + return ctx.Req.Context().Deadline() +} + +// Done is part of the interface for context.Context and we pass this to the request context +func (ctx *Context) Done() <-chan struct{} { + return ctx.Req.Context().Done() +} + +// Err is part of the interface for context.Context and we pass this to the request context +func (ctx *Context) Err() error { + return 
ctx.Req.Context().Err() +} + +// Value is part of the interface for context.Context and we pass this to the request context +func (ctx *Context) Value(key interface{}) interface{} { + return ctx.Req.Context().Value(key) +} + // Handler represents a custom handler type Handler func(*Context) @@ -585,6 +626,28 @@ func getCsrfOpts() CsrfOptions { } } +// Auth converts auth.Auth as a middleware +func Auth(authMethod auth.Auth) func(*Context) { + return func(ctx *Context) { + ctx.User = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + if ctx.User != nil { + ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == new(auth.Basic).Name() + ctx.IsSigned = true + ctx.Data["IsSigned"] = ctx.IsSigned + ctx.Data["SignedUser"] = ctx.User + ctx.Data["SignedUserID"] = ctx.User.ID + ctx.Data["SignedUserName"] = ctx.User.Name + ctx.Data["IsAdmin"] = ctx.User.IsAdmin + } else { + ctx.Data["SignedUserID"] = int64(0) + ctx.Data["SignedUserName"] = "" + + // ensure the session uid is deleted + _ = ctx.Session.Delete("uid") + } + } +} + // Contexter initializes a classic context for a request. func Contexter() func(next http.Handler) http.Handler { var rnd = templates.HTMLRenderer() @@ -670,21 +733,6 @@ func Contexter() func(next http.Handler) http.Handler { } } - // Get user from session if logged in. - ctx.User, ctx.IsBasicAuth = sso.SignedInUser(ctx.Req, ctx.Resp, &ctx, ctx.Session) - - if ctx.User != nil { - ctx.IsSigned = true - ctx.Data["IsSigned"] = ctx.IsSigned - ctx.Data["SignedUser"] = ctx.User - ctx.Data["SignedUserID"] = ctx.User.ID - ctx.Data["SignedUserName"] = ctx.User.Name - ctx.Data["IsAdmin"] = ctx.User.IsAdmin - } else { - ctx.Data["SignedUserID"] = int64(0) - ctx.Data["SignedUserName"] = "" - } - ctx.Resp.Header().Set(`X-Frame-Options`, `SAMEORIGIN`) ctx.Data["CsrfToken"] = html.EscapeString(ctx.csrf.GetToken()) @@ -692,6 +740,7 @@ func Contexter() func(next http.Handler) http.Handler { //log.Debug("Session ID: %s", ctx.Session.ID()) //log.Debug("CSRF Token: %v", ctx.Data["CsrfToken"]) + // FIXME: do we really always need these setting? There should be someway to have to avoid having to always set these ctx.Data["IsLandingPageHome"] = setting.LandingPageURL == setting.LandingPageHome ctx.Data["IsLandingPageExplore"] = setting.LandingPageURL == setting.LandingPageExplore ctx.Data["IsLandingPageOrganizations"] = setting.LandingPageURL == setting.LandingPageOrganizations @@ -708,6 +757,11 @@ func Contexter() func(next http.Handler) http.Handler { ctx.Data["ManifestData"] = setting.ManifestData + ctx.Data["UnitWikiGlobalDisabled"] = models.UnitTypeWiki.UnitGlobalDisabled() + ctx.Data["UnitIssuesGlobalDisabled"] = models.UnitTypeIssues.UnitGlobalDisabled() + ctx.Data["UnitPullsGlobalDisabled"] = models.UnitTypePullRequests.UnitGlobalDisabled() + ctx.Data["UnitProjectsGlobalDisabled"] = models.UnitTypeProjects.UnitGlobalDisabled() + ctx.Data["i18n"] = locale ctx.Data["Tr"] = i18n.Tr ctx.Data["Lang"] = locale.Language() diff --git a/modules/context/csrf.go b/modules/context/csrf.go index ba0e9f6cde77..8d179ca90474 100644 --- a/modules/context/csrf.go +++ b/modules/context/csrf.go @@ -22,6 +22,7 @@ import ( "net/http" "time" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web/middleware" "github.com/unknwon/com" @@ -266,7 +267,12 @@ func Validate(ctx *Context, x CSRF) { -1, x.GetCookiePath(), x.GetCookieDomain()) // FIXME: Do we need to set the Secure, httpOnly and SameSite values too? 
- x.Error(ctx.Resp) + if middleware.IsAPIPath(ctx.Req) { + x.Error(ctx.Resp) + return + } + ctx.Flash.Error(ctx.Tr("error.invalid_csrf")) + ctx.Redirect(setting.AppSubURL + "/") } return } @@ -277,10 +283,19 @@ func Validate(ctx *Context, x CSRF) { -1, x.GetCookiePath(), x.GetCookieDomain()) // FIXME: Do we need to set the Secure, httpOnly and SameSite values too? - x.Error(ctx.Resp) + if middleware.IsAPIPath(ctx.Req) { + x.Error(ctx.Resp) + return + } + ctx.Flash.Error(ctx.Tr("error.invalid_csrf")) + ctx.Redirect(setting.AppSubURL + "/") } return } - - http.Error(ctx.Resp, "Bad Request: no CSRF token present", http.StatusBadRequest) + if middleware.IsAPIPath(ctx.Req) { + http.Error(ctx.Resp, "Bad Request: no CSRF token present", http.StatusBadRequest) + return + } + ctx.Flash.Error(ctx.Tr("error.missing_csrf")) + ctx.Redirect(setting.AppSubURL + "/") } diff --git a/modules/context/form.go b/modules/context/form.go index c7b76c614c27..e3afad0a9046 100644 --- a/modules/context/form.go +++ b/modules/context/form.go @@ -13,6 +13,7 @@ import ( "text/template" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" ) // Forms a new enhancement of http.Request @@ -225,3 +226,16 @@ func (f *Forms) MustBool(key string, defaults ...bool) bool { } return v } + +// MustOptionalBool returns request form as OptionalBool with default +func (f *Forms) MustOptionalBool(key string, defaults ...util.OptionalBool) util.OptionalBool { + value := (*http.Request)(f).FormValue(key) + if len(value) == 0 { + return util.OptionalBoolNone + } + v, err := strconv.ParseBool((*http.Request)(f).FormValue(key)) + if len(defaults) > 0 && err != nil { + return defaults[0] + } + return util.OptionalBoolOf(v) +} diff --git a/modules/context/repo.go b/modules/context/repo.go index 5ce31e9e3504..ea8323bdfc22 100644 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -6,6 +6,7 @@ package context import ( + "context" "fmt" "io/ioutil" "net/url" @@ -37,7 +38,7 @@ var IssueTemplateDirCandidates = []string{ ".gitlab/issue_template", } -// PullRequest contains informations to make a pull request +// PullRequest contains information to make a pull request type PullRequest struct { BaseRepo *models.Repository Allowed bool @@ -359,13 +360,17 @@ func repoAssignment(ctx *Context, repo *models.Repository) { var err error ctx.Repo.Mirror, err = models.GetMirrorByRepoID(repo.ID) if err != nil { - ctx.ServerError("GetMirror", err) + ctx.ServerError("GetMirrorByRepoID", err) return } ctx.Data["MirrorEnablePrune"] = ctx.Repo.Mirror.EnablePrune ctx.Data["MirrorInterval"] = ctx.Repo.Mirror.Interval ctx.Data["Mirror"] = ctx.Repo.Mirror } + if err = repo.LoadPushMirrors(); err != nil { + ctx.ServerError("LoadPushMirrors", err) + return + } ctx.Repo.Repository = repo ctx.Data["RepoName"] = ctx.Repo.Repository.Name @@ -393,7 +398,7 @@ func RepoIDAssignment() func(ctx *Context) { } // RepoAssignment returns a middleware to handle repository assignment -func RepoAssignment(ctx *Context) { +func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { var ( owner *models.User err error @@ -529,12 +534,12 @@ func RepoAssignment(ctx *Context) { ctx.Repo.GitRepo = gitRepo // We opened it, we should close it - defer func() { + cancel = func() { // If it's been set to nil then assume someone else has closed it. if ctx.Repo.GitRepo != nil { ctx.Repo.GitRepo.Close() } - }() + } // Stop at this point when the repo is empty. 
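// Illustrative sketch only (not part of the patch above): the tri-state behaviour of
// the new Forms.MustOptionalBool / ctx.QueryOptionalBool helpers shown above. An empty
// value yields util.OptionalBoolNone, an unparsable value falls back to the supplied
// default, and anything else becomes util.OptionalBoolOf(parsed). The IsTrue/IsFalse
// accessors used below are the existing helpers on util.OptionalBool and are assumed
// here rather than shown in the patch.

package example

import (
	gitea "code.gitea.io/gitea/modules/context"
	"code.gitea.io/gitea/modules/util"
)

// listHandler is a hypothetical handler filtering on an optional ?private= parameter.
func listHandler(ctx *gitea.Context) {
	private := ctx.QueryOptionalBool("private", util.OptionalBoolNone)
	switch {
	case private.IsTrue():
		// ?private=true  -> only private items
	case private.IsFalse():
		// ?private=false -> only public items
	default:
		// parameter absent or empty -> no filter applied
	}
}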
if ctx.Repo.Repository.IsEmpty { @@ -619,6 +624,7 @@ func RepoAssignment(ctx *Context) { ctx.Data["GoDocDirectory"] = prefix + "{/dir}" ctx.Data["GoDocFile"] = prefix + "{/dir}/{file}#L{line}" } + return } // RepoRefType type of repo reference @@ -643,7 +649,7 @@ const ( // RepoRef handles repository reference names when the ref name is not // explicitly given -func RepoRef() func(*Context) { +func RepoRef() func(*Context) context.CancelFunc { // since no ref name is explicitly specified, ok to just use branch return RepoRefByType(RepoRefBranch) } @@ -722,8 +728,8 @@ func getRefName(ctx *Context, pathType RepoRefType) string { // RepoRefByType handles repository reference name for a specific type // of repository reference -func RepoRefByType(refType RepoRefType) func(*Context) { - return func(ctx *Context) { +func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context) context.CancelFunc { + return func(ctx *Context) (cancel context.CancelFunc) { // Empty repository does not have reference information. if ctx.Repo.Repository.IsEmpty { return @@ -742,12 +748,12 @@ func RepoRefByType(refType RepoRefType) func(*Context) { return } // We opened it, we should close it - defer func() { + cancel = func() { // If it's been set to nil then assume someone else has closed it. if ctx.Repo.GitRepo != nil { ctx.Repo.GitRepo.Close() } - }() + } } // Get default branch. @@ -811,6 +817,9 @@ func RepoRefByType(refType RepoRefType) func(*Context) { util.URLJoin(setting.AppURL, strings.Replace(ctx.Req.URL.RequestURI(), refName, ctx.Repo.Commit.ID.String(), 1)))) } } else { + if len(ignoreNotExistErr) > 0 && ignoreNotExistErr[0] { + return + } ctx.NotFound("RepoRef invalid repo", fmt.Errorf("branch or tag not exist: %s", refName)) return } @@ -841,6 +850,7 @@ func RepoRefByType(refType RepoRefType) func(*Context) { return } ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount + return } } @@ -899,12 +909,18 @@ func (ctx *Context) IssueTemplatesFromDefaultBranch() []api.IssueTemplate { log.Debug("DataAsync: %v", err) continue } - defer r.Close() + closed := false + defer func() { + if !closed { + _ = r.Close() + } + }() data, err := ioutil.ReadAll(r) if err != nil { log.Debug("ReadAll: %v", err) continue } + _ = r.Close() var it api.IssueTemplate content, err := markdown.ExtractMetadata(string(data), &it) if err != nil { diff --git a/modules/context/response.go b/modules/context/response.go index 4ffbd230a20d..a20fc63536e0 100644 --- a/modules/context/response.go +++ b/modules/context/response.go @@ -49,7 +49,7 @@ func (r *Response) Write(bs []byte) (int, error) { return size, err } if r.status == 0 { - r.WriteHeader(200) + r.status = http.StatusOK } return size, nil } diff --git a/modules/context/secret.go b/modules/context/secret.go deleted file mode 100644 index fcb488d211a0..000000000000 --- a/modules/context/secret.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. 
- -package context - -import ( - "crypto/aes" - "crypto/cipher" - "crypto/rand" - "crypto/sha256" - "encoding/base64" - "errors" - "io" -) - -// NewSecret creates a new secret -func NewSecret() (string, error) { - return NewSecretWithLength(32) -} - -// NewSecretWithLength creates a new secret for a given length -func NewSecretWithLength(length int64) (string, error) { - return randomString(length) -} - -func randomBytes(len int64) ([]byte, error) { - b := make([]byte, len) - if _, err := rand.Read(b); err != nil { - return nil, err - } - return b, nil -} - -func randomString(len int64) (string, error) { - b, err := randomBytes(len) - return base64.URLEncoding.EncodeToString(b), err -} - -// AesEncrypt encrypts text and given key with AES. -func AesEncrypt(key, text []byte) ([]byte, error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - b := base64.StdEncoding.EncodeToString(text) - ciphertext := make([]byte, aes.BlockSize+len(b)) - iv := ciphertext[:aes.BlockSize] - if _, err := io.ReadFull(rand.Reader, iv); err != nil { - return nil, err - } - cfb := cipher.NewCFBEncrypter(block, iv) - cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b)) - return ciphertext, nil -} - -// AesDecrypt decrypts text and given key with AES. -func AesDecrypt(key, text []byte) ([]byte, error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - if len(text) < aes.BlockSize { - return nil, errors.New("ciphertext too short") - } - iv := text[:aes.BlockSize] - text = text[aes.BlockSize:] - cfb := cipher.NewCFBDecrypter(block, iv) - cfb.XORKeyStream(text, text) - data, err := base64.StdEncoding.DecodeString(string(text)) - if err != nil { - return nil, err - } - return data, nil -} - -// EncryptSecret encrypts a string with given key into a hex string -func EncryptSecret(key string, str string) (string, error) { - keyHash := sha256.Sum256([]byte(key)) - plaintext := []byte(str) - ciphertext, err := AesEncrypt(keyHash[:], plaintext) - if err != nil { - return "", err - } - return base64.StdEncoding.EncodeToString(ciphertext), nil -} - -// DecryptSecret decrypts a previously encrypted hex string -func DecryptSecret(key string, cipherhex string) (string, error) { - keyHash := sha256.Sum256([]byte(key)) - ciphertext, err := base64.StdEncoding.DecodeString(cipherhex) - if err != nil { - return "", err - } - plaintext, err := AesDecrypt(keyHash[:], ciphertext) - if err != nil { - return "", err - } - return string(plaintext), nil -} diff --git a/modules/convert/convert.go b/modules/convert/convert.go index 109931dbc343..9a4714b4e054 100644 --- a/modules/convert/convert.go +++ b/modules/convert/convert.go @@ -8,6 +8,7 @@ package convert import ( "fmt" "strconv" + "strings" "time" "code.gitea.io/gitea/models" @@ -135,6 +136,7 @@ func ToBranchProtection(bp *models.ProtectedBranch) *api.BranchProtection { func ToTag(repo *models.Repository, t *git.Tag) *api.Tag { return &api.Tag{ Name: t.Name, + Message: strings.TrimSpace(t.Message), ID: t.ID.String(), Commit: ToCommitMeta(repo, t), ZipballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip"), @@ -189,6 +191,7 @@ func ToGPGKey(key *models.GPGKey) *api.GPGKey { CanEncryptComms: k.CanEncryptComms, CanEncryptStorage: k.CanEncryptStorage, CanCertify: k.CanSign, + Verified: k.Verified, } } emails := make([]*api.GPGKeyEmail, len(key.Emails)) @@ -208,6 +211,7 @@ func ToGPGKey(key *models.GPGKey) *api.GPGKey { CanEncryptComms: key.CanEncryptComms, CanEncryptStorage: key.CanEncryptStorage, CanCertify: key.CanSign, + 
Verified: key.Verified, } } diff --git a/modules/convert/git_commit.go b/modules/convert/git_commit.go index c647dd4e1843..fd4f12ecfa6d 100644 --- a/modules/convert/git_commit.go +++ b/modules/convert/git_commit.go @@ -155,8 +155,8 @@ func ToCommit(repo *models.Repository, commit *git.Commit, userCache map[string] URL: repo.APIURL() + "/git/commits/" + commit.ID.String(), Author: &api.CommitUser{ Identity: api.Identity{ - Name: commit.Committer.Name, - Email: commit.Committer.Email, + Name: commit.Author.Name, + Email: commit.Author.Email, }, Date: commit.Author.When.Format(time.RFC3339), }, diff --git a/modules/convert/notification.go b/modules/convert/notification.go index cc941678b603..b0888ee09f0c 100644 --- a/modules/convert/notification.go +++ b/modules/convert/notification.go @@ -27,7 +27,7 @@ func ToNotificationThread(n *models.Notification) *api.NotificationThread { //handle Subject switch n.Source { case models.NotificationSourceIssue: - result.Subject = &api.NotificationSubject{Type: "Issue"} + result.Subject = &api.NotificationSubject{Type: api.NotifySubjectIssue} if n.Issue != nil { result.Subject.Title = n.Issue.Title result.Subject.URL = n.Issue.APIURL() @@ -38,7 +38,7 @@ func ToNotificationThread(n *models.Notification) *api.NotificationThread { } } case models.NotificationSourcePullRequest: - result.Subject = &api.NotificationSubject{Type: "Pull"} + result.Subject = &api.NotificationSubject{Type: api.NotifySubjectPull} if n.Issue != nil { result.Subject.Title = n.Issue.Title result.Subject.URL = n.Issue.APIURL() @@ -55,13 +55,13 @@ func ToNotificationThread(n *models.Notification) *api.NotificationThread { } case models.NotificationSourceCommit: result.Subject = &api.NotificationSubject{ - Type: "Commit", + Type: api.NotifySubjectCommit, Title: n.CommitID, URL: n.Repository.HTMLURL() + "/commit/" + n.CommitID, } case models.NotificationSourceRepository: result.Subject = &api.NotificationSubject{ - Type: "Repository", + Type: api.NotifySubjectRepository, Title: n.Repository.FullName(), URL: n.Repository.Link(), } diff --git a/modules/convert/repository.go b/modules/convert/repository.go index 9a4fbb97caec..7f3d67137f75 100644 --- a/modules/convert/repository.go +++ b/modules/convert/repository.go @@ -91,7 +91,7 @@ func innerToRepo(repo *models.Repository, mode models.AccessMode, isParent bool) return nil } - numReleases, _ := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{IncludeDrafts: false, IncludeTags: true}) + numReleases, _ := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{IncludeDrafts: false, IncludeTags: false}) mirrorInterval := "" if repo.IsMirror { diff --git a/modules/convert/user.go b/modules/convert/user.go index 364914f2ad02..164ffb71fd2e 100644 --- a/modules/convert/user.go +++ b/modules/convert/user.go @@ -25,6 +25,15 @@ func ToUser(user, doer *models.User) *api.User { return toUser(user, signed, authed) } +// ToUsers convert list of models.User to list of api.User +func ToUsers(doer *models.User, users []*models.User) []*api.User { + result := make([]*api.User, len(users)) + for i := range users { + result[i] = ToUser(users[i], doer) + } + return result +} + // ToUserWithAccessMode convert models.User to api.User // AccessMode is not none show add some more information func ToUserWithAccessMode(user *models.User, accessMode models.AccessMode) *api.User { @@ -48,16 +57,41 @@ func toUser(user *models.User, signed, authed bool) *api.User { Location: user.Location, Website: user.Website, Description: user.Description, + 
// counter's + Followers: user.NumFollowers, + Following: user.NumFollowing, + StarredRepos: user.NumStars, } + + result.Visibility = user.Visibility.String() + // hide primary email if API caller is anonymous or user keep email private if signed && (!user.KeepEmailPrivate || authed) { result.Email = user.Email } + // only site admin will get these information and possibly user himself if authed { result.IsAdmin = user.IsAdmin result.LastLogin = user.LastLoginUnix.AsTime() result.Language = user.Language + result.IsActive = user.IsActive + result.ProhibitLogin = user.ProhibitLogin } return result } + +// User2UserSettings return UserSettings based on a user +func User2UserSettings(user *models.User) api.UserSettings { + return api.UserSettings{ + FullName: user.FullName, + Website: user.Website, + Location: user.Location, + Language: user.Language, + Description: user.Description, + Theme: user.Theme, + HideEmail: user.KeepEmailPrivate, + HideActivity: user.KeepActivityPrivate, + DiffViewStyle: user.DiffViewStyle, + } +} diff --git a/modules/convert/user_test.go b/modules/convert/user_test.go index 39396534410f..679c4f98948a 100644 --- a/modules/convert/user_test.go +++ b/modules/convert/user_test.go @@ -8,10 +8,13 @@ import ( "testing" "code.gitea.io/gitea/models" + api "code.gitea.io/gitea/modules/structs" + "github.com/stretchr/testify/assert" ) func TestUser_ToUser(t *testing.T) { + assert.NoError(t, models.PrepareTestDatabase()) user1 := models.AssertExistsAndLoadBean(t, &models.User{ID: 1, IsAdmin: true}).(*models.User) @@ -25,4 +28,11 @@ func TestUser_ToUser(t *testing.T) { apiUser = toUser(user1, false, false) assert.False(t, apiUser.IsAdmin) + assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility) + + user31 := models.AssertExistsAndLoadBean(t, &models.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}).(*models.User) + + apiUser = toUser(user31, true, true) + assert.False(t, apiUser.IsAdmin) + assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility) } diff --git a/modules/cron/tasks_extended.go b/modules/cron/tasks_extended.go index 4a37e5d242ee..680f83e50c87 100644 --- a/modules/cron/tasks_extended.go +++ b/modules/cron/tasks_extended.go @@ -33,7 +33,7 @@ func registerDeleteRepositoryArchives() { RunAtStart: false, Schedule: "@annually", }, func(ctx context.Context, _ *models.User, _ Config) error { - return models.DeleteRepositoryArchives(ctx) + return repo_module.DeleteRepositoryArchives(ctx) }) } diff --git a/modules/doctor/checkOldArchives.go b/modules/doctor/checkOldArchives.go new file mode 100644 index 000000000000..a4e2ffbd1f0c --- /dev/null +++ b/modules/doctor/checkOldArchives.go @@ -0,0 +1,59 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package doctor + +import ( + "os" + "path/filepath" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" +) + +func checkOldArchives(logger log.Logger, autofix bool) error { + numRepos := 0 + numReposUpdated := 0 + err := iterateRepositories(func(repo *models.Repository) error { + if repo.IsEmpty { + return nil + } + + p := filepath.Join(repo.RepoPath(), "archives") + isDir, err := util.IsDir(p) + if err != nil { + log.Warn("check if %s is directory failed: %v", p, err) + } + if isDir { + numRepos++ + if autofix { + if err := os.RemoveAll(p); err == nil { + numReposUpdated++ + } else { + log.Warn("remove %s failed: %v", p, err) + } + } + } + return nil + }) + + if autofix { + logger.Info("%d / %d old archives in repository deleted", numReposUpdated, numRepos) + } else { + logger.Info("%d old archives in repository need to be deleted", numRepos) + } + + return err +} + +func init() { + Register(&Check{ + Title: "Check old archives", + Name: "check-old-archives", + IsDefault: false, + Run: checkOldArchives, + Priority: 7, + }) +} diff --git a/modules/doctor/dbversion.go b/modules/doctor/dbversion.go index f82cf7209302..c5cac3bf91fc 100644 --- a/modules/doctor/dbversion.go +++ b/modules/doctor/dbversion.go @@ -23,7 +23,7 @@ func checkDBVersion(logger log.Logger, autofix bool) error { err = models.NewEngine(context.Background(), migrations.Migrate) if err != nil { - logger.Critical("Error: %v during migration") + logger.Critical("Error: %v during migration", err) } return err } diff --git a/modules/emoji/emoji.go b/modules/emoji/emoji.go index 01fb764ce368..85df2d697307 100644 --- a/modules/emoji/emoji.go +++ b/modules/emoji/emoji.go @@ -6,6 +6,7 @@ package emoji import ( + "io" "sort" "strings" "sync" @@ -145,6 +146,8 @@ func (n *rememberSecondWriteWriter) Write(p []byte) (int, error) { if n.writecount == 2 { n.idx = n.pos n.end = n.pos + len(p) + n.pos += len(p) + return len(p), io.EOF } n.pos += len(p) return len(p), nil @@ -155,6 +158,8 @@ func (n *rememberSecondWriteWriter) WriteString(s string) (int, error) { if n.writecount == 2 { n.idx = n.pos n.end = n.pos + len(s) + n.pos += len(s) + return len(s), io.EOF } n.pos += len(s) return len(s), nil diff --git a/modules/eventsource/manager.go b/modules/eventsource/manager.go index 212fe6056969..812d6739929d 100644 --- a/modules/eventsource/manager.go +++ b/modules/eventsource/manager.go @@ -13,6 +13,7 @@ type Manager struct { mutex sync.Mutex messengers map[int64]*Messenger + connection chan struct{} } var manager *Manager @@ -20,6 +21,7 @@ var manager *Manager func init() { manager = &Manager{ messengers: make(map[int64]*Messenger), + connection: make(chan struct{}, 1), } } @@ -36,6 +38,10 @@ func (m *Manager) Register(uid int64) <-chan *Event { messenger = NewMessenger(uid) m.messengers[uid] = messenger } + select { + case m.connection <- struct{}{}: + default: + } m.mutex.Unlock() return messenger.Register() } diff --git a/modules/eventsource/manager_run.go b/modules/eventsource/manager_run.go index ccfe2e07097a..60598ecb495f 100644 --- a/modules/eventsource/manager_run.go +++ b/modules/eventsource/manager_run.go @@ -34,6 +34,35 @@ loop: timer.Stop() break loop case <-timer.C: + m.mutex.Lock() + connectionCount := len(m.messengers) + if connectionCount == 0 { + log.Trace("Event source has no listeners") + // empty the connection channel + select { + case <-m.connection: + default: + } + } + m.mutex.Unlock() + if connectionCount == 0 { + // No listeners so the source can be paused + 
log.Trace("Pausing the eventsource") + select { + case <-ctx.Done(): + break loop + case <-m.connection: + log.Trace("Connection detected - restarting the eventsource") + // OK we're back so lets reset the timer and start again + // We won't change the "then" time because there could be concurrency issues + select { + case <-timer.C: + default: + } + continue + } + } + now := timeutil.TimeStampNow().Add(-2) uidCounts, err := models.GetUIDsAndNotificationCounts(then, now) diff --git a/modules/generate/generate.go b/modules/generate/generate.go index 304ad87f213b..4ed2a503b004 100644 --- a/modules/generate/generate.go +++ b/modules/generate/generate.go @@ -9,31 +9,12 @@ import ( "crypto/rand" "encoding/base64" "io" - "math/big" "time" + "code.gitea.io/gitea/modules/util" "github.com/dgrijalva/jwt-go" ) -// GetRandomString generate random string by specify chars. -func GetRandomString(n int) (string, error) { - const alphanum = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - - buffer := make([]byte, n) - max := big.NewInt(int64(len(alphanum))) - - for i := 0; i < n; i++ { - index, err := randomInt(max) - if err != nil { - return "", err - } - - buffer[i] = alphanum[index] - } - - return string(buffer), nil -} - // NewInternalToken generate a new value intended to be used by INTERNAL_TOKEN. func NewInternalToken() (string, error) { secretBytes := make([]byte, 32) @@ -57,31 +38,31 @@ func NewInternalToken() (string, error) { return internalToken, nil } -// NewJwtSecret generate a new value intended to be used by LFS_JWT_SECRET. -func NewJwtSecret() (string, error) { - JWTSecretBytes := make([]byte, 32) - _, err := io.ReadFull(rand.Reader, JWTSecretBytes) +// NewJwtSecret generates a new value intended to be used for JWT secrets. +func NewJwtSecret() ([]byte, error) { + bytes := make([]byte, 32) + _, err := io.ReadFull(rand.Reader, bytes) if err != nil { - return "", err + return nil, err } - return base64.RawURLEncoding.EncodeToString(JWTSecretBytes), nil + return bytes, nil } -// NewSecretKey generate a new value intended to be used by SECRET_KEY. -func NewSecretKey() (string, error) { - secretKey, err := GetRandomString(64) +// NewJwtSecretBase64 generates a new base64 encoded value intended to be used for JWT secrets. +func NewJwtSecretBase64() (string, error) { + bytes, err := NewJwtSecret() if err != nil { return "", err } - - return secretKey, nil + return base64.RawURLEncoding.EncodeToString(bytes), nil } -func randomInt(max *big.Int) (int, error) { - rand, err := rand.Int(rand.Reader, max) +// NewSecretKey generate a new value intended to be used by SECRET_KEY. +func NewSecretKey() (string, error) { + secretKey, err := util.RandomString(64) if err != nil { - return 0, err + return "", err } - return int(rand.Int64()), nil + return secretKey, nil } diff --git a/modules/generate/generate_test.go b/modules/generate/generate_test.go deleted file mode 100644 index 1cacfe66812c..000000000000 --- a/modules/generate/generate_test.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. 
- -package generate - -import ( - "os" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestMain(m *testing.M) { - retVal := m.Run() - - os.Exit(retVal) -} - -func TestGetRandomString(t *testing.T) { - randomString, err := GetRandomString(4) - assert.NoError(t, err) - assert.Len(t, randomString, 4) -} diff --git a/modules/git/batch_reader.go b/modules/git/batch_reader.go index 1905067cb4a3..bdf82bde8913 100644 --- a/modules/git/batch_reader.go +++ b/modules/git/batch_reader.go @@ -11,14 +11,54 @@ import ( "math" "strconv" "strings" + + "code.gitea.io/gitea/modules/log" + + "github.com/djherbis/buffer" + "github.com/djherbis/nio/v3" ) +// WriteCloserError wraps an io.WriteCloser with an additional CloseWithError function +type WriteCloserError interface { + io.WriteCloser + CloseWithError(err error) error +} + +// CatFileBatchCheck opens git cat-file --batch-check in the provided repo and returns a stdin pipe, a stdout reader and cancel function +func CatFileBatchCheck(repoPath string) (WriteCloserError, *bufio.Reader, func()) { + batchStdinReader, batchStdinWriter := io.Pipe() + batchStdoutReader, batchStdoutWriter := io.Pipe() + cancel := func() { + _ = batchStdinReader.Close() + _ = batchStdinWriter.Close() + _ = batchStdoutReader.Close() + _ = batchStdoutWriter.Close() + } + + go func() { + stderr := strings.Builder{} + err := NewCommand("cat-file", "--batch-check").RunInDirFullPipeline(repoPath, batchStdoutWriter, &stderr, batchStdinReader) + if err != nil { + _ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) + _ = batchStdinReader.CloseWithError(ConcatenateError(err, (&stderr).String())) + } else { + _ = batchStdoutWriter.Close() + _ = batchStdinReader.Close() + } + }() + + // For simplicities sake we'll use a buffered reader to read from the cat-file --batch-check + batchReader := bufio.NewReader(batchStdoutReader) + + return batchStdinWriter, batchReader, cancel +} + // CatFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function -func CatFileBatch(repoPath string) (*io.PipeWriter, *bufio.Reader, func()) { - // Next feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. +func CatFileBatch(repoPath string) (WriteCloserError, *bufio.Reader, func()) { + // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. 
// so let's create a batch stdin and stdout batchStdinReader, batchStdinWriter := io.Pipe() - batchStdoutReader, batchStdoutWriter := io.Pipe() + batchStdoutReader, batchStdoutWriter := nio.Pipe(buffer.New(32 * 1024)) cancel := func() { _ = batchStdinReader.Close() _ = batchStdinWriter.Close() @@ -39,7 +79,7 @@ func CatFileBatch(repoPath string) (*io.PipeWriter, *bufio.Reader, func()) { }() // For simplicities sake we'll us a buffered reader to read from the cat-file --batch - batchReader := bufio.NewReader(batchStdoutReader) + batchReader := bufio.NewReaderSize(batchStdoutReader, 32*1024) return batchStdinWriter, batchReader, cancel } @@ -47,26 +87,37 @@ func CatFileBatch(repoPath string) (*io.PipeWriter, *bufio.Reader, func()) { // ReadBatchLine reads the header line from cat-file --batch // We expect: // SP SP LF +// sha is a 40byte not 20byte here func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err error) { - sha, err = rd.ReadBytes(' ') + typ, err = rd.ReadString('\n') if err != nil { return } - sha = sha[:len(sha)-1] - - typ, err = rd.ReadString(' ') - if err != nil { + if len(typ) == 1 { + typ, err = rd.ReadString('\n') + if err != nil { + return + } + } + idx := strings.IndexByte(typ, ' ') + if idx < 0 { + log.Debug("missing space typ: %s", typ) + err = ErrNotExist{ID: string(sha)} return } - typ = typ[:len(typ)-1] + sha = []byte(typ[:idx]) + typ = typ[idx+1:] - var sizeStr string - sizeStr, err = rd.ReadString('\n') - if err != nil { + idx = strings.IndexByte(typ, ' ') + if idx < 0 { + err = ErrNotExist{ID: string(sha)} return } - size, err = strconv.ParseInt(sizeStr[:len(sizeStr)-1], 10, 64) + sizeStr := typ[idx+1 : len(typ)-1] + typ = typ[:idx] + + size, err = strconv.ParseInt(sizeStr, 10, 64) return } @@ -93,7 +144,7 @@ headerLoop: } // Discard the rest of the tag - discard := size - n + discard := size - n + 1 for discard > math.MaxInt32 { _, err := rd.Discard(math.MaxInt32) if err != nil { @@ -128,7 +179,7 @@ headerLoop: } // Discard the rest of the commit - discard := size - n + discard := size - n + 1 for discard > math.MaxInt32 { _, err := rd.Discard(math.MaxInt32) if err != nil { @@ -149,98 +200,56 @@ headerLoop: // constant hextable to help quickly convert between 20byte and 40byte hashes const hextable = "0123456789abcdef" -// To40ByteSHA converts a 20-byte SHA in a 40-byte slice into a 40-byte sha in place -// without allocations. This is at least 100x quicker that hex.EncodeToString -// NB This requires that sha is a 40-byte slice -func To40ByteSHA(sha []byte) []byte { +// To40ByteSHA converts a 20-byte SHA into a 40-byte sha. Input and output can be the +// same 40 byte slice to support in place conversion without allocations. +// This is at least 100x quicker that hex.EncodeToString +// NB This requires that out is a 40-byte slice +func To40ByteSHA(sha, out []byte) []byte { for i := 19; i >= 0; i-- { v := sha[i] vhi, vlo := v>>4, v&0x0f shi, slo := hextable[vhi], hextable[vlo] - sha[i*2], sha[i*2+1] = shi, slo + out[i*2], out[i*2+1] = shi, slo } - return sha + return out } -// ParseTreeLineSkipMode reads an entry from a tree in a cat-file --batch stream -// This simply skips the mode - saving a substantial amount of time and carefully avoids allocations - except where fnameBuf is too small. +// ParseTreeLine reads an entry from a tree in a cat-file --batch stream +// This carefully avoids allocations - except where fnameBuf is too small. 
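// Illustrative sketch only (not part of the patch above): driving the long-lived
// `git cat-file --batch-check` process returned by CatFileBatchCheck. One object ID is
// written per line and ReadBatchLine parses the "<sha> SP <type> SP <size> LF" reply,
// as the Blob.Size implementation further below does.

package example

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

// objectTypeAndSize is a hypothetical helper resolving a single object's type and size.
func objectTypeAndSize(repoPath, id string) (string, int64, error) {
	wr, rd, cancel := git.CatFileBatchCheck(repoPath)
	defer cancel()

	if _, err := wr.Write([]byte(id + "\n")); err != nil {
		return "", 0, err
	}
	_, typ, size, err := git.ReadBatchLine(rd)
	if err != nil {
		return "", 0, err
	}
	return typ, size, nil
}

func example() {
	// repository path and object ID are placeholders
	typ, size, err := objectTypeAndSize("/path/to/repo.git", "0123456789abcdef0123456789abcdef01234567")
	if err == nil {
		fmt.Printf("%s, %d bytes\n", typ, size)
	}
}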
// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations // // Each line is composed of: // SP NUL <20-byte SHA> // // We don't attempt to convert the 20-byte SHA to 40-byte SHA to save a lot of time -func ParseTreeLineSkipMode(rd *bufio.Reader, fnameBuf, shaBuf []byte) (fname, sha []byte, n int, err error) { +func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) { var readBytes []byte - // Skip the Mode - readBytes, err = rd.ReadSlice(' ') // NB: DOES NOT ALLOCATE SIMPLY RETURNS SLICE WITHIN READER BUFFER - if err != nil { - return - } - n += len(readBytes) - // Deal with the fname + // Read the Mode & fname readBytes, err = rd.ReadSlice('\x00') - copy(fnameBuf, readBytes) - if len(fnameBuf) > len(readBytes) { - fnameBuf = fnameBuf[:len(readBytes)] // cut the buf the correct size - } else { - fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...) // extend the buf and copy in the missing bits - } - for err == bufio.ErrBufferFull { // Then we need to read more - readBytes, err = rd.ReadSlice('\x00') - fnameBuf = append(fnameBuf, readBytes...) // there is little point attempting to avoid allocations here so just extend - } - n += len(fnameBuf) if err != nil { return } - fnameBuf = fnameBuf[:len(fnameBuf)-1] // Drop the terminal NUL - fname = fnameBuf // set the returnable fname to the slice + idx := bytes.IndexByte(readBytes, ' ') + if idx < 0 { + log.Debug("missing space in readBytes ParseTreeLine: %s", readBytes) - // Now deal with the 20-byte SHA - idx := 0 - for idx < 20 { - read := 0 - read, err = rd.Read(shaBuf[idx:20]) - n += read - if err != nil { - return - } - idx += read - } - sha = shaBuf - return -} - -// ParseTreeLine reads an entry from a tree in a cat-file --batch stream -// This carefully avoids allocations - except where fnameBuf is too small. -// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations -// -// Each line is composed of: -// SP NUL <20-byte SHA> -// -// We don't attempt to convert the 20-byte SHA to 40-byte SHA to save a lot of time -func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) { - var readBytes []byte - - // Read the Mode - readBytes, err = rd.ReadSlice(' ') - if err != nil { + err = &ErrNotExist{} return } - n += len(readBytes) - copy(modeBuf, readBytes) - if len(modeBuf) > len(readBytes) { - modeBuf = modeBuf[:len(readBytes)] - } else { - modeBuf = append(modeBuf, readBytes[len(modeBuf):]...) + n += idx + 1 + copy(modeBuf, readBytes[:idx]) + if len(modeBuf) >= idx { + modeBuf = modeBuf[:idx] + } else { + modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...) 
} - mode = modeBuf[:len(modeBuf)-1] // Drop the SP + mode = modeBuf + + readBytes = readBytes[idx+1:] // Deal with the fname - readBytes, err = rd.ReadSlice('\x00') copy(fnameBuf, readBytes) if len(fnameBuf) > len(readBytes) { fnameBuf = fnameBuf[:len(readBytes)] @@ -259,7 +268,7 @@ func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fn fname = fnameBuf // Deal with the 20-byte SHA - idx := 0 + idx = 0 for idx < 20 { read := 0 read, err = rd.Read(shaBuf[idx:20]) diff --git a/modules/git/blame.go b/modules/git/blame.go index c2129c9e1cf1..fcbf183981c3 100644 --- a/modules/git/blame.go +++ b/modules/git/blame.go @@ -34,7 +34,7 @@ type BlameReader struct { var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})") -// NextPart returns next part of blame (sequencial code lines with the same commit) +// NextPart returns next part of blame (sequential code lines with the same commit) func (r *BlameReader) NextPart() (*BlamePart, error) { var blamePart *BlamePart diff --git a/modules/git/blob.go b/modules/git/blob.go index 674a6a959277..5831bc3735aa 100644 --- a/modules/git/blob.go +++ b/modules/git/blob.go @@ -10,6 +10,8 @@ import ( "encoding/base64" "io" "io/ioutil" + + "code.gitea.io/gitea/modules/typesniffer" ) // This file contains common functions between the gogit and !gogit variants for git Blobs @@ -32,7 +34,7 @@ func (b *Blob) GetBlobContent() (string, error) { return string(buf), nil } -// GetBlobLineCount gets line count of lob as raw text +// GetBlobLineCount gets line count of the blob func (b *Blob) GetBlobLineCount() (int, error) { reader, err := b.DataAsync() if err != nil { @@ -40,10 +42,14 @@ func (b *Blob) GetBlobLineCount() (int, error) { } defer reader.Close() buf := make([]byte, 32*1024) - count := 0 + count := 1 lineSep := []byte{'\n'} + + c, err := reader.Read(buf) + if c == 0 && err == io.EOF { + return 0, nil + } for { - c, err := reader.Read(buf) count += bytes.Count(buf[:c], lineSep) switch { case err == io.EOF: @@ -51,6 +57,7 @@ func (b *Blob) GetBlobLineCount() (int, error) { case err != nil: return count, err } + c, err = reader.Read(buf) } } @@ -82,3 +89,14 @@ func (b *Blob) GetBlobContentBase64() (string, error) { } return string(out), nil } + +// GuessContentType guesses the content type of the blob. +func (b *Blob) GuessContentType() (typesniffer.SniffedType, error) { + r, err := b.DataAsync() + if err != nil { + return typesniffer.SniffedType{}, err + } + defer r.Close() + + return typesniffer.DetectContentTypeFromReader(r) +} diff --git a/modules/git/blob_nogogit.go b/modules/git/blob_nogogit.go index e917a316195d..5b42920ebe1d 100644 --- a/modules/git/blob_nogogit.go +++ b/modules/git/blob_nogogit.go @@ -8,48 +8,56 @@ package git import ( "bufio" + "bytes" "io" - "strconv" - "strings" + "io/ioutil" + "math" + + "code.gitea.io/gitea/modules/log" ) // Blob represents a Git object. type Blob struct { ID SHA1 - gotSize bool - size int64 - repoPath string - name string + gotSize bool + size int64 + name string + repo *Repository } // DataAsync gets a ReadCloser for the contents of a blob without reading it all. // Calling the Close function on the result will discard all unread output. 
func (b *Blob) DataAsync() (io.ReadCloser, error) { - stdoutReader, stdoutWriter := io.Pipe() - - go func() { - stderr := &strings.Builder{} - err := NewCommand("cat-file", "--batch").RunInDirFullPipeline(b.repoPath, stdoutWriter, stderr, strings.NewReader(b.ID.String()+"\n")) - if err != nil { - err = ConcatenateError(err, stderr.String()) - _ = stdoutWriter.CloseWithError(err) - } else { - _ = stdoutWriter.Close() - } - }() + wr, rd, cancel := b.repo.CatFileBatch() - bufReader := bufio.NewReader(stdoutReader) - _, _, size, err := ReadBatchLine(bufReader) + _, err := wr.Write([]byte(b.ID.String() + "\n")) + if err != nil { + cancel() + return nil, err + } + _, _, size, err := ReadBatchLine(rd) if err != nil { - stdoutReader.Close() + cancel() return nil, err } + b.gotSize = true + b.size = size - return &LimitedReaderCloser{ - R: bufReader, - C: stdoutReader, - N: size, + if size < 4096 { + bs, err := ioutil.ReadAll(io.LimitReader(rd, size)) + if err != nil { + cancel() + return nil, err + } + _, err = rd.Discard(1) + return ioutil.NopCloser(bytes.NewReader(bs)), err + } + + return &blobReader{ + rd: rd, + n: size, + cancel: cancel, }, nil } @@ -59,18 +67,66 @@ func (b *Blob) Size() int64 { return b.size } - size, err := NewCommand("cat-file", "-s", b.ID.String()).RunInDir(b.repoPath) + wr, rd, cancel := b.repo.CatFileBatchCheck() + defer cancel() + _, err := wr.Write([]byte(b.ID.String() + "\n")) if err != nil { - log("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repoPath, err) + log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err) return 0 } - - b.size, err = strconv.ParseInt(size[:len(size)-1], 10, 64) + _, _, b.size, err = ReadBatchLine(rd) if err != nil { - log("error whilst parsing size %s for %s in %s. Error: %v", size, b.ID.String(), b.repoPath, err) + log.Debug("error whilst reading size for %s in %s. 
Error: %v", b.ID.String(), b.repo.Path, err) return 0 } + b.gotSize = true return b.size } + +type blobReader struct { + rd *bufio.Reader + n int64 + cancel func() +} + +func (b *blobReader) Read(p []byte) (n int, err error) { + if b.n <= 0 { + return 0, io.EOF + } + if int64(len(p)) > b.n { + p = p[0:b.n] + } + n, err = b.rd.Read(p) + b.n -= int64(n) + return +} + +// Close implements io.Closer +func (b *blobReader) Close() error { + if b.n > 0 { + for b.n > math.MaxInt32 { + n, err := b.rd.Discard(math.MaxInt32) + b.n -= int64(n) + if err != nil { + b.cancel() + return err + } + b.n -= math.MaxInt32 + } + n, err := b.rd.Discard(int(b.n)) + b.n -= int64(n) + if err != nil { + b.cancel() + return err + } + } + if b.n == 0 { + _, err := b.rd.Discard(1) + b.n-- + b.cancel() + return err + } + return nil +} diff --git a/modules/git/blob_test.go b/modules/git/blob_test.go index d02251ed9024..2ceda6c4ef17 100644 --- a/modules/git/blob_test.go +++ b/modules/git/blob_test.go @@ -29,9 +29,10 @@ func TestBlob_Data(t *testing.T) { r, err := testBlob.DataAsync() assert.NoError(t, err) require.NotNil(t, r) - defer r.Close() data, err := ioutil.ReadAll(r) + assert.NoError(t, r.Close()) + assert.NoError(t, err) assert.Equal(t, output, string(data)) } @@ -54,7 +55,7 @@ func Benchmark_Blob_Data(b *testing.B) { if err != nil { b.Fatal(err) } - defer r.Close() ioutil.ReadAll(r) + _ = r.Close() } } diff --git a/modules/git/command.go b/modules/git/command.go index fe258954628e..d83c42fdc218 100644 --- a/modules/git/command.go +++ b/modules/git/command.go @@ -15,6 +15,7 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" ) @@ -22,8 +23,8 @@ var ( // GlobalCommandArgs global command args for external package setting GlobalCommandArgs []string - // DefaultCommandExecutionTimeout default command execution timeout duration - DefaultCommandExecutionTimeout = 360 * time.Second + // defaultCommandExecutionTimeout default command execution timeout duration + defaultCommandExecutionTimeout = 360 * time.Second ) // DefaultLocale is the default LC_ALL to run git commands in. @@ -110,13 +111,13 @@ func (c *Command) RunInDirTimeoutEnvFullPipeline(env []string, timeout time.Dura // it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. Between cmd.Start and cmd.Wait the passed in function is run. func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader, fn func(context.Context, context.CancelFunc) error) error { if timeout == -1 { - timeout = DefaultCommandExecutionTimeout + timeout = defaultCommandExecutionTimeout } if len(dir) == 0 { - log(c.String()) + log.Debug("%s", c) } else { - log("%s: %v", dir, c) + log.Debug("%s: %v", dir, c) } ctx, cancel := context.WithTimeout(c.parentContext, timeout) @@ -124,12 +125,18 @@ func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time. cmd := exec.CommandContext(ctx, c.name, c.args...) 
if env == nil { - cmd.Env = append(os.Environ(), fmt.Sprintf("LC_ALL=%s", DefaultLocale)) + cmd.Env = os.Environ() } else { cmd.Env = env - cmd.Env = append(cmd.Env, fmt.Sprintf("LC_ALL=%s", DefaultLocale)) } + cmd.Env = append( + cmd.Env, + fmt.Sprintf("LC_ALL=%s", DefaultLocale), + // avoid prompting for credentials interactively, supported since git v2.3 + "GIT_TERMINAL_PROMPT=0", + ) + // TODO: verify if this is still needed in golang 1.15 if goVersionLessThan115 { cmd.Env = append(cmd.Env, "GODEBUG=asyncpreemptoff=1") @@ -191,9 +198,12 @@ func (c *Command) RunInDirTimeoutEnv(env []string, timeout time.Duration, dir st if err := c.RunInDirTimeoutEnvPipeline(env, timeout, dir, stdout, stderr); err != nil { return nil, ConcatenateError(err, stderr.String()) } - - if stdout.Len() > 0 { - log("stdout:\n%s", stdout.Bytes()[:1024]) + if stdout.Len() > 0 && log.IsTrace() { + tracelen := stdout.Len() + if tracelen > 1024 { + tracelen = 1024 + } + log.Trace("Stdout:\n %s", stdout.Bytes()[:tracelen]) } return stdout.Bytes(), nil } diff --git a/modules/git/commit.go b/modules/git/commit.go index 027642720d0a..3ce2b03886d1 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -11,16 +11,12 @@ import ( "container/list" "errors" "fmt" - "image" - "image/color" - _ "image/gif" // for processing gif images - _ "image/jpeg" // for processing jpeg images - _ "image/png" // for processing png images "io" - "net/http" "os/exec" "strconv" "strings" + + "code.gitea.io/gitea/modules/log" ) // Commit represents a git commit. @@ -81,70 +77,6 @@ func (c *Commit) ParentCount() int { return len(c.Parents) } -func isImageFile(data []byte) (string, bool) { - contentType := http.DetectContentType(data) - if strings.Contains(contentType, "image/") { - return contentType, true - } - return contentType, false -} - -// IsImageFile is a file image type -func (c *Commit) IsImageFile(name string) bool { - blob, err := c.GetBlobByPath(name) - if err != nil { - return false - } - - dataRc, err := blob.DataAsync() - if err != nil { - return false - } - defer dataRc.Close() - buf := make([]byte, 1024) - n, _ := dataRc.Read(buf) - buf = buf[:n] - _, isImage := isImageFile(buf) - return isImage -} - -// ImageMetaData represents metadata of an image file -type ImageMetaData struct { - ColorModel color.Model - Width int - Height int - ByteSize int64 -} - -// ImageInfo returns information about the dimensions of an image -func (c *Commit) ImageInfo(name string) (*ImageMetaData, error) { - if !c.IsImageFile(name) { - return nil, nil - } - - blob, err := c.GetBlobByPath(name) - if err != nil { - return nil, err - } - reader, err := blob.DataAsync() - if err != nil { - return nil, err - } - defer reader.Close() - config, _, err := image.DecodeConfig(reader) - if err != nil { - return nil, err - } - - metadata := ImageMetaData{ - ColorModel: config.ColorModel, - Width: config.Width, - Height: config.Height, - ByteSize: blob.Size(), - } - return &metadata, nil -} - // GetCommitByPath return the commit of relative path object. func (c *Commit) GetCommitByPath(relpath string) (*Commit, error) { return c.repo.getCommitByPathWithID(c.ID, relpath) @@ -502,33 +434,59 @@ func NewCommitFileStatus() *CommitFileStatus { } } +func parseCommitFileStatus(fileStatus *CommitFileStatus, stdout io.Reader) { + rd := bufio.NewReader(stdout) + peek, err := rd.Peek(1) + if err != nil { + if err != io.EOF { + log.Error("Unexpected error whilst reading from git log --name-status. 
Error: %v", err) + } + return + } + if peek[0] == '\n' || peek[0] == '\x00' { + _, _ = rd.Discard(1) + } + for { + modifier, err := rd.ReadSlice('\x00') + if err != nil { + if err != io.EOF { + log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err) + } + return + } + file, err := rd.ReadString('\x00') + if err != nil { + if err != io.EOF { + log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err) + } + return + } + file = file[:len(file)-1] + switch modifier[0] { + case 'A': + fileStatus.Added = append(fileStatus.Added, file) + case 'D': + fileStatus.Removed = append(fileStatus.Removed, file) + case 'M': + fileStatus.Modified = append(fileStatus.Modified, file) + } + } +} + // GetCommitFileStatus returns file status of commit in given repository. func GetCommitFileStatus(repoPath, commitID string) (*CommitFileStatus, error) { stdout, w := io.Pipe() done := make(chan struct{}) fileStatus := NewCommitFileStatus() go func() { - scanner := bufio.NewScanner(stdout) - for scanner.Scan() { - fields := strings.Fields(scanner.Text()) - if len(fields) < 2 { - continue - } - - switch fields[0][0] { - case 'A': - fileStatus.Added = append(fileStatus.Added, fields[1]) - case 'D': - fileStatus.Removed = append(fileStatus.Removed, fields[1]) - case 'M': - fileStatus.Modified = append(fileStatus.Modified, fields[1]) - } - } - done <- struct{}{} + parseCommitFileStatus(fileStatus, stdout) + close(done) }() stderr := new(bytes.Buffer) - err := NewCommand("show", "--name-status", "--pretty=format:''", commitID).RunInDirPipeline(repoPath, w, stderr) + args := []string{"log", "--name-status", "-c", "--pretty=format:", "--parents", "--no-renames", "-z", "-1", commitID} + + err := NewCommand(args...).RunInDirPipeline(repoPath, w, stderr) w.Close() // Close writer to exit parsing goroutine if err != nil { return nil, ConcatenateError(err, stderr.String()) diff --git a/modules/git/commit_info_gogit.go b/modules/git/commit_info_gogit.go index 6d95e22d0c7c..a8006dcef2e6 100644 --- a/modules/git/commit_info_gogit.go +++ b/modules/git/commit_info_gogit.go @@ -7,6 +7,7 @@ package git import ( + "context" "path" "github.com/emirpasic/gods/trees/binaryheap" @@ -16,7 +17,7 @@ import ( ) // GetCommitsInfo gets information of all commits that are corresponding to these entries -func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCommitCache) ([]CommitInfo, *Commit, error) { +func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath string, cache *LastCommitCache) ([]CommitInfo, *Commit, error) { entryPaths := make([]string, len(tes)+1) // Get the commit for the treePath itself entryPaths[0] = "" @@ -42,7 +43,7 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo return nil, nil, err } if len(unHitPaths) > 0 { - revs2, err := GetLastCommitForPaths(c, treePath, unHitPaths) + revs2, err := GetLastCommitForPaths(ctx, c, treePath, unHitPaths) if err != nil { return nil, nil, err } @@ -55,7 +56,7 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo } } } else { - revs, err = GetLastCommitForPaths(c, treePath, entryPaths) + revs, err = GetLastCommitForPaths(ctx, c, treePath, entryPaths) } if err != nil { return nil, nil, err @@ -173,7 +174,7 @@ func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cac } // GetLastCommitForPaths returns last commit information -func GetLastCommitForPaths(c cgobject.CommitNode, treePath string, paths 
[]string) (map[string]*object.Commit, error) { +func GetLastCommitForPaths(ctx context.Context, c cgobject.CommitNode, treePath string, paths []string) (map[string]*object.Commit, error) { // We do a tree traversal with nodes sorted by commit time heap := binaryheap.NewWith(func(a, b interface{}) int { if a.(*commitAndPaths).commit.CommitTime().Before(b.(*commitAndPaths).commit.CommitTime()) { @@ -192,6 +193,11 @@ func GetLastCommitForPaths(c cgobject.CommitNode, treePath string, paths []strin heap.Push(&commitAndPaths{c, paths, initialHashes}) for { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } cIn, ok := heap.Pop() if !ok { break diff --git a/modules/git/commit_info_nogogit.go b/modules/git/commit_info_nogogit.go index 6dd7b11ba79c..060ecba26190 100644 --- a/modules/git/commit_info_nogogit.go +++ b/modules/git/commit_info_nogogit.go @@ -7,18 +7,17 @@ package git import ( - "bufio" - "bytes" + "context" "fmt" "io" - "math" "path" "sort" - "strings" + + "code.gitea.io/gitea/modules/log" ) // GetCommitsInfo gets information of all commits that are corresponding to these entries -func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCommitCache) ([]CommitInfo, *Commit, error) { +func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath string, cache *LastCommitCache) ([]CommitInfo, *Commit, error) { entryPaths := make([]string, len(tes)+1) // Get the commit for the treePath itself entryPaths[0] = "" @@ -31,32 +30,27 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo var revs map[string]*Commit if cache != nil { var unHitPaths []string - revs, unHitPaths, err = getLastCommitForPathsByCache(commit.ID.String(), treePath, entryPaths, cache) + revs, unHitPaths, err = getLastCommitForPathsByCache(ctx, commit.ID.String(), treePath, entryPaths, cache) if err != nil { return nil, nil, err } if len(unHitPaths) > 0 { sort.Strings(unHitPaths) - commits, err := GetLastCommitForPaths(commit, treePath, unHitPaths) + commits, err := GetLastCommitForPaths(ctx, commit, treePath, unHitPaths) if err != nil { return nil, nil, err } - for i, found := range commits { - if err := cache.Put(commit.ID.String(), path.Join(treePath, unHitPaths[i]), found.ID.String()); err != nil { + for pth, found := range commits { + if err := cache.Put(commit.ID.String(), path.Join(treePath, pth), found.ID.String()); err != nil { return nil, nil, err } - revs[unHitPaths[i]] = found + revs[pth] = found } } } else { sort.Strings(entryPaths) - revs = map[string]*Commit{} - var foundCommits []*Commit - foundCommits, err = GetLastCommitForPaths(commit, treePath, entryPaths) - for i, found := range foundCommits { - revs[entryPaths[i]] = found - } + revs, err = GetLastCommitForPaths(ctx, commit, treePath, entryPaths) } if err != nil { return nil, nil, err @@ -85,6 +79,8 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo subModuleFile := NewSubModuleFile(entryCommit, subModuleURL, entry.ID.String()) commitsInfo[i].SubModuleFile = subModuleFile } + } else { + log.Debug("missing commit for %s", entry.Name()) } } @@ -101,8 +97,8 @@ func (tes Entries) GetCommitsInfo(commit *Commit, treePath string, cache *LastCo return commitsInfo, treeCommit, nil } -func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) { - wr, rd, cancel := CatFileBatch(cache.repo.Path) +func getLastCommitForPathsByCache(ctx context.Context, commitID, treePath 
string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) { + wr, rd, cancel := cache.repo.CatFileBatch() defer cancel() var unHitEntryPaths []string @@ -124,205 +120,24 @@ func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cac } // GetLastCommitForPaths returns last commit information -func GetLastCommitForPaths(commit *Commit, treePath string, paths []string) ([]*Commit, error) { +func GetLastCommitForPaths(ctx context.Context, commit *Commit, treePath string, paths []string) (map[string]*Commit, error) { // We read backwards from the commit to obtain all of the commits - - // We'll do this by using rev-list to provide us with parent commits in order - revListReader, revListWriter := io.Pipe() - defer func() { - _ = revListWriter.Close() - _ = revListReader.Close() - }() - - go func() { - stderr := strings.Builder{} - err := NewCommand("rev-list", "--format=%T", commit.ID.String()).RunInDirPipeline(commit.repo.Path, revListWriter, &stderr) - if err != nil { - _ = revListWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) - } else { - _ = revListWriter.Close() - } - }() - - batchStdinWriter, batchReader, cancel := CatFileBatch(commit.repo.Path) - defer cancel() - - mapsize := 4096 - if len(paths) > mapsize { - mapsize = len(paths) - } - - path2idx := make(map[string]int, mapsize) - for i, path := range paths { - path2idx[path] = i + revs, err := WalkGitLog(ctx, commit.repo, commit, treePath, paths...) + if err != nil { + return nil, err } - fnameBuf := make([]byte, 4096) - modeBuf := make([]byte, 40) - - allShaBuf := make([]byte, (len(paths)+1)*20) - shaBuf := make([]byte, 20) - tmpTreeID := make([]byte, 40) - - // commits is the returnable commits matching the paths provided - commits := make([]string, len(paths)) - // ids are the blob/tree ids for the paths - ids := make([][]byte, len(paths)) - - // We'll use a scanner for the revList because it's simpler than a bufio.Reader - scan := bufio.NewScanner(revListReader) -revListLoop: - for scan.Scan() { - // Get the next parent commit ID - commitID := scan.Text() - if !scan.Scan() { - break revListLoop - } - commitID = commitID[7:] - rootTreeID := scan.Text() - - // push the tree to the cat-file --batch process - _, err := batchStdinWriter.Write([]byte(rootTreeID + "\n")) - if err != nil { - return nil, err - } - - currentPath := "" - - // OK if the target tree path is "" and the "" is in the paths just set this now - if treePath == "" && paths[0] == "" { - // If this is the first time we see this set the id appropriate for this paths to this tree and set the last commit to curCommit - if len(ids[0]) == 0 { - ids[0] = []byte(rootTreeID) - commits[0] = string(commitID) - } else if bytes.Equal(ids[0], []byte(rootTreeID)) { - commits[0] = string(commitID) - } - } - - treeReadingLoop: - for { - _, _, size, err := ReadBatchLine(batchReader) - if err != nil { - return nil, err - } - - // Handle trees - - // n is counter for file position in the tree file - var n int64 - - // Two options: currentPath is the targetTreepath - if treePath == currentPath { - // We are in the right directory - // Parse each tree line in turn. (don't care about mode here.) 
- for n < size { - fname, sha, count, err := ParseTreeLineSkipMode(batchReader, fnameBuf, shaBuf) - shaBuf = sha - if err != nil { - return nil, err - } - n += int64(count) - idx, ok := path2idx[string(fname)] - if ok { - // Now if this is the first time round set the initial Blob(ish) SHA ID and the commit - if len(ids[idx]) == 0 { - copy(allShaBuf[20*(idx+1):20*(idx+2)], shaBuf) - ids[idx] = allShaBuf[20*(idx+1) : 20*(idx+2)] - commits[idx] = string(commitID) - } else if bytes.Equal(ids[idx], shaBuf) { - commits[idx] = string(commitID) - } - } - // FIXME: is there any order to the way strings are emitted from cat-file? - // if there is - then we could skip once we've passed all of our data - } - break treeReadingLoop - } - - var treeID []byte - - // We're in the wrong directory - // Find target directory in this directory - idx := len(currentPath) - if idx > 0 { - idx++ - } - target := strings.SplitN(treePath[idx:], "/", 2)[0] - - for n < size { - // Read each tree entry in turn - mode, fname, sha, count, err := ParseTreeLine(batchReader, modeBuf, fnameBuf, shaBuf) - if err != nil { - return nil, err - } - n += int64(count) - - // if we have found the target directory - if bytes.Equal(fname, []byte(target)) && bytes.Equal(mode, []byte("40000")) { - copy(tmpTreeID, sha) - treeID = tmpTreeID - break - } - } - - if n < size { - // Discard any remaining entries in the current tree - discard := size - n - for discard > math.MaxInt32 { - _, err := batchReader.Discard(math.MaxInt32) - if err != nil { - return nil, err - } - discard -= math.MaxInt32 - } - _, err := batchReader.Discard(int(discard)) - if err != nil { - return nil, err - } - } - - // if we haven't found a treeID for the target directory our search is over - if len(treeID) == 0 { - break treeReadingLoop - } - - // add the target to the current path - if idx > 0 { - currentPath += "/" - } - currentPath += target - - // if we've now found the current path check its sha id and commit status - if treePath == currentPath && paths[0] == "" { - if len(ids[0]) == 0 { - copy(allShaBuf[0:20], treeID) - ids[0] = allShaBuf[0:20] - commits[0] = string(commitID) - } else if bytes.Equal(ids[0], treeID) { - commits[0] = string(commitID) - } - } - treeID = To40ByteSHA(treeID) - _, err = batchStdinWriter.Write(treeID) - if err != nil { - return nil, err - } - _, err = batchStdinWriter.Write([]byte("\n")) - if err != nil { - return nil, err - } - } - } + batchStdinWriter, batchReader, cancel := commit.repo.CatFileBatch() + defer cancel() - commitsMap := make(map[string]*Commit, len(commits)) + commitsMap := map[string]*Commit{} commitsMap[commit.ID.String()] = commit - commitCommits := make([]*Commit, len(commits)) - for i, commitID := range commits { + commitCommits := map[string]*Commit{} + for path, commitID := range revs { c, ok := commitsMap[commitID] if ok { - commitCommits[i] = c + commitCommits[path] = c continue } @@ -345,8 +160,11 @@ revListLoop: if err != nil { return nil, err } - commitCommits[i] = c + if _, err := batchReader.Discard(1); err != nil { + return nil, err + } + commitCommits[path] = c } - return commitCommits, scan.Err() + return commitCommits, nil } diff --git a/modules/git/commit_info_test.go b/modules/git/commit_info_test.go index 3966419bc146..0608801f9df4 100644 --- a/modules/git/commit_info_test.go +++ b/modules/git/commit_info_test.go @@ -5,6 +5,7 @@ package git import ( + "context" "os" "path/filepath" "testing" @@ -69,7 +70,7 @@ func testGetCommitsInfo(t *testing.T, repo1 *Repository) { assert.NoError(t, err) 
entries, err := tree.ListEntries() assert.NoError(t, err) - commitsInfo, treeCommit, err := entries.GetCommitsInfo(commit, testCase.Path, nil) + commitsInfo, treeCommit, err := entries.GetCommitsInfo(context.Background(), commit, testCase.Path, nil) assert.NoError(t, err) if err != nil { t.FailNow() @@ -136,7 +137,7 @@ func BenchmarkEntries_GetCommitsInfo(b *testing.B) { b.ResetTimer() b.Run(benchmark.name, func(b *testing.B) { for i := 0; i < b.N; i++ { - _, _, err := entries.GetCommitsInfo(commit, "", nil) + _, _, err := entries.GetCommitsInfo(context.Background(), commit, "", nil) if err != nil { b.Fatal(err) } diff --git a/modules/git/commit_reader.go b/modules/git/commit_reader.go index a4d15b6bada7..3c1f6f5ffd65 100644 --- a/modules/git/commit_reader.go +++ b/modules/git/commit_reader.go @@ -17,7 +17,9 @@ import ( // If used as part of a cat-file --batch stream you need to limit the reader to the correct size func CommitFromReader(gitRepo *Repository, sha SHA1, reader io.Reader) (*Commit, error) { commit := &Commit{ - ID: sha, + ID: sha, + Author: &Signature{}, + Committer: &Signature{}, } payloadSB := new(strings.Builder) diff --git a/modules/git/commit_test.go b/modules/git/commit_test.go index 0925a6ce6ac1..57132c00dc69 100644 --- a/modules/git/commit_test.go +++ b/modules/git/commit_test.go @@ -130,3 +130,109 @@ func TestHasPreviousCommit(t *testing.T) { assert.NoError(t, err) assert.False(t, selfNot) } + +func TestParseCommitFileStatus(t *testing.T) { + type testcase struct { + output string + added []string + removed []string + modified []string + } + + kases := []testcase{ + { + // Merge commit + output: "MM\x00options/locale/locale_en-US.ini\x00", + modified: []string{ + "options/locale/locale_en-US.ini", + }, + added: []string{}, + removed: []string{}, + }, + { + // Spaces commit + output: "D\x00b\x00D\x00b b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00", + removed: []string{ + "b", + "b b/b", + }, + modified: []string{}, + added: []string{ + "b b/b b/b b/b", + "b b/b b/b b/b b/b", + }, + }, + { + // larger commit + output: 
"M\x00go.mod\x00M\x00go.sum\x00M\x00modules/ssh/ssh.go\x00M\x00vendor/github.com/gliderlabs/ssh/circle.yml\x00M\x00vendor/github.com/gliderlabs/ssh/context.go\x00A\x00vendor/github.com/gliderlabs/ssh/go.mod\x00A\x00vendor/github.com/gliderlabs/ssh/go.sum\x00M\x00vendor/github.com/gliderlabs/ssh/server.go\x00M\x00vendor/github.com/gliderlabs/ssh/session.go\x00M\x00vendor/github.com/gliderlabs/ssh/ssh.go\x00M\x00vendor/golang.org/x/sys/unix/mkerrors.sh\x00M\x00vendor/golang.org/x/sys/unix/syscall_darwin.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_linux.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go\x00M\x00vendor/modules.txt\x00", + modified: []string{ + "go.mod", + "go.sum", + "modules/ssh/ssh.go", + "vendor/github.com/gliderlabs/ssh/circle.yml", + "vendor/github.com/gliderlabs/ssh/context.go", + "vendor/github.com/gliderlabs/ssh/server.go", + "vendor/github.com/gliderlabs/ssh/session.go", + "vendor/github.com/gliderlabs/ssh/ssh.go", + "vendor/golang.org/x/sys/unix/mkerrors.sh", + "vendor/golang.org/x/sys/unix/syscall_darwin.go", + "vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go", + "vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go", + "vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go", + "vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go", + "vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go", + "vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go", + "vendor/golang.org/x/sys/unix/zerrors_linux.go", + "vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go", + "vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go", + "vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go", + "vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go", + "vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go", + "vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go", + "vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go", + "vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go", + "vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go", + "vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go", + "vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go", + "vendor/modules.txt", + }, + added: []string{ + "vendor/github.com/gliderlabs/ssh/go.mod", + "vendor/github.com/gliderlabs/ssh/go.sum", + }, + removed: []string{}, + }, + { + // git 1.7.2 adds an unnecessary \x00 on merge commit + output: "\x00MM\x00options/locale/locale_en-US.ini\x00", + modified: []string{ + "options/locale/locale_en-US.ini", + }, + added: []string{}, + removed: []string{}, + }, + { + // git 1.7.2 adds an unnecessary \n on normal commit + output: "\nD\x00b\x00D\x00b 
b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00", + removed: []string{ + "b", + "b b/b", + }, + modified: []string{}, + added: []string{ + "b b/b b/b b/b", + "b b/b b/b b/b b/b", + }, + }, + } + + for _, kase := range kases { + fileStatus := NewCommitFileStatus() + parseCommitFileStatus(fileStatus, strings.NewReader(kase.output)) + + assert.Equal(t, kase.added, fileStatus.Added) + assert.Equal(t, kase.removed, fileStatus.Removed) + assert.Equal(t, kase.modified, fileStatus.Modified) + } + +} diff --git a/modules/git/diff.go b/modules/git/diff.go index 5da53568e570..20f25c1bee3f 100644 --- a/modules/git/diff.go +++ b/modules/git/diff.go @@ -15,6 +15,7 @@ import ( "strconv" "strings" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" ) @@ -113,7 +114,7 @@ func ParseDiffHunkString(diffhunk string) (leftLine, leftHunk, rightLine, righHu righHunk, _ = strconv.Atoi(rightRange[1]) } } else { - log("Parse line number failed: %v", diffhunk) + log.Debug("Parse line number failed: %v", diffhunk) rightLine = leftLine righHunk = leftHunk } diff --git a/modules/git/error.go b/modules/git/error.go index 85a938a7da26..387dd724e585 100644 --- a/modules/git/error.go +++ b/modules/git/error.go @@ -159,3 +159,20 @@ func (err *ErrPushRejected) GenerateMessage() { } err.Message = strings.TrimSpace(messageBuilder.String()) } + +// ErrMoreThanOne represents an error if pull request fails when there are more than one sources (branch, tag) with the same name +type ErrMoreThanOne struct { + StdOut string + StdErr string + Err error +} + +// IsErrMoreThanOne checks if an error is a ErrMoreThanOne +func IsErrMoreThanOne(err error) bool { + _, ok := err.(*ErrMoreThanOne) + return ok +} + +func (err *ErrMoreThanOne) Error() string { + return fmt.Sprintf("ErrMoreThanOne Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut) +} diff --git a/modules/git/git.go b/modules/git/git.go index 6b15138a5c74..ef6ec0c2bf32 100644 --- a/modules/git/git.go +++ b/modules/git/git.go @@ -14,14 +14,12 @@ import ( "time" "code.gitea.io/gitea/modules/process" + "code.gitea.io/gitea/modules/setting" "github.com/hashicorp/go-version" ) var ( - // Debug enables verbose logging on everything. - // This should be false in case Gogs starts in SSH mode. - Debug = false // Prefix the log prefix Prefix = "[git-module] " // GitVersionRequired is the minimum Git version required @@ -41,19 +39,6 @@ var ( goVersionLessThan115 = true ) -func log(format string, args ...interface{}) { - if !Debug { - return - } - - fmt.Print(Prefix) - if len(args) == 0 { - fmt.Println(format) - } else { - fmt.Printf(format+"\n", args...) - } -} - // LocalVersion returns current Git version from shell. func LocalVersion() (*version.Version, error) { if err := LoadGitVersion(); err != nil { @@ -122,10 +107,42 @@ func SetExecutablePath(path string) error { return nil } +// VersionInfo returns git version information +func VersionInfo() string { + var format = "Git Version: %s" + var args = []interface{}{gitVersion.Original()} + // Since git wire protocol has been released from git v2.18 + if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil { + format += ", Wire Protocol %s Enabled" + args = append(args, "Version 2") // for focus color + } + + return fmt.Sprintf(format, args...) 
+} + // Init initializes git module func Init(ctx context.Context) error { DefaultContext = ctx + defaultCommandExecutionTimeout = time.Duration(setting.Git.Timeout.Default) * time.Second + + if err := SetExecutablePath(setting.Git.Path); err != nil { + return err + } + + // force cleanup args + GlobalCommandArgs = []string{} + + if CheckGitVersionAtLeast("2.9") == nil { + // Explicitly disable credential helper, otherwise Git credentials might leak + GlobalCommandArgs = append(GlobalCommandArgs, "-c", "credential.helper=") + } + + // Since git wire protocol has been released from git v2.18 + if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil { + GlobalCommandArgs = append(GlobalCommandArgs, "-c", "protocol.version=2") + } + // Save current git version on init to gitVersion otherwise it would require an RWMutex if err := LoadGitVersion(); err != nil { return err diff --git a/modules/git/git_test.go b/modules/git/git_test.go index 27951d639bb7..c62a55badc68 100644 --- a/modules/git/git_test.go +++ b/modules/git/git_test.go @@ -9,6 +9,8 @@ import ( "fmt" "os" "testing" + + "code.gitea.io/gitea/modules/log" ) func fatalTestError(fmtStr string, args ...interface{}) { @@ -17,6 +19,8 @@ func fatalTestError(fmtStr string, args ...interface{}) { } func TestMain(m *testing.M) { + _ = log.NewLogger(1000, "console", "console", `{"level":"trace","stacktracelevel":"NONE","stderr":true}`) + if err := Init(context.Background()); err != nil { fatalTestError("Init failed: %v", err) } diff --git a/modules/git/hook.go b/modules/git/hook.go index c23fbf8aa1bb..7007d23be22e 100644 --- a/modules/git/hook.go +++ b/modules/git/hook.go @@ -1,4 +1,5 @@ // Copyright 2015 The Gogs Authors. All rights reserved. +// Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. @@ -12,6 +13,7 @@ import ( "path/filepath" "strings" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" ) @@ -126,11 +128,11 @@ const ( // SetUpdateHook writes given content to update hook of the repository. func SetUpdateHook(repoPath, content string) (err error) { - log("Setting update hook: %s", repoPath) + log.Debug("Setting update hook: %s", repoPath) hookPath := path.Join(repoPath, HookPathUpdate) isExist, err := util.IsExist(hookPath) if err != nil { - log("Unable to check if %s exists. Error: %v", hookPath, err) + log.Debug("Unable to check if %s exists. 
Error: %v", hookPath, err) return err } if isExist { diff --git a/modules/git/last_commit_cache.go b/modules/git/last_commit_cache.go index 37a59e1fa812..e2d296641f3b 100644 --- a/modules/git/last_commit_cache.go +++ b/modules/git/last_commit_cache.go @@ -7,6 +7,8 @@ package git import ( "crypto/sha256" "fmt" + + "code.gitea.io/gitea/modules/log" ) // Cache represents a caching interface @@ -24,6 +26,6 @@ func (c *LastCommitCache) getCacheKey(repoPath, ref, entryPath string) string { // Put put the last commit id with commit and entry path func (c *LastCommitCache) Put(ref, entryPath, commitID string) error { - log("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID) + log.Debug("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID) return c.cache.Put(c.getCacheKey(c.repoPath, ref, entryPath), commitID, c.ttl()) } diff --git a/modules/git/last_commit_cache_gogit.go b/modules/git/last_commit_cache_gogit.go index 65d6299bef38..b8e0db46a926 100644 --- a/modules/git/last_commit_cache_gogit.go +++ b/modules/git/last_commit_cache_gogit.go @@ -7,8 +7,11 @@ package git import ( + "context" "path" + "code.gitea.io/gitea/modules/log" + "github.com/go-git/go-git/v5/plumbing/object" cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph" ) @@ -40,9 +43,9 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64, func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) { v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath)) if vs, ok := v.(string); ok { - log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) + log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) if commit, ok := c.commitCache[vs]; ok { - log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) + log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) return commit, nil } id, err := c.repo.ConvertToSHA1(vs) @@ -60,7 +63,7 @@ func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) { } // CacheCommit will cache the commit from the gitRepository -func (c *LastCommitCache) CacheCommit(commit *Commit) error { +func (c *LastCommitCache) CacheCommit(ctx context.Context, commit *Commit) error { commitNodeIndex, _ := commit.repo.CommitNodeIndex() @@ -69,10 +72,10 @@ func (c *LastCommitCache) CacheCommit(commit *Commit) error { return err } - return c.recursiveCache(index, &commit.Tree, "", 1) + return c.recursiveCache(ctx, index, &commit.Tree, "", 1) } -func (c *LastCommitCache) recursiveCache(index cgobject.CommitNode, tree *Tree, treePath string, level int) error { +func (c *LastCommitCache) recursiveCache(ctx context.Context, index cgobject.CommitNode, tree *Tree, treePath string, level int) error { if level == 0 { return nil } @@ -89,7 +92,7 @@ func (c *LastCommitCache) recursiveCache(index cgobject.CommitNode, tree *Tree, entryMap[entry.Name()] = entry } - commits, err := GetLastCommitForPaths(index, treePath, entryPaths) + commits, err := GetLastCommitForPaths(ctx, index, treePath, entryPaths) if err != nil { return err } @@ -103,7 +106,7 @@ func (c *LastCommitCache) recursiveCache(index cgobject.CommitNode, tree *Tree, if err != nil { return err } - if err := c.recursiveCache(index, subTree, entry, level-1); err != nil { + if err := c.recursiveCache(ctx, index, subTree, entry, level-1); err != nil { return err } } diff --git a/modules/git/last_commit_cache_nogogit.go b/modules/git/last_commit_cache_nogogit.go index 0a1babb11214..faf6e23fa816 100644 --- 
a/modules/git/last_commit_cache_nogogit.go +++ b/modules/git/last_commit_cache_nogogit.go @@ -8,8 +8,10 @@ package git import ( "bufio" - "io" + "context" "path" + + "code.gitea.io/gitea/modules/log" ) // LastCommitCache represents a cache to store last commit @@ -36,12 +38,12 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64, } // Get get the last commit information by commit id and entry path -func (c *LastCommitCache) Get(ref, entryPath string, wr *io.PipeWriter, rd *bufio.Reader) (interface{}, error) { +func (c *LastCommitCache) Get(ref, entryPath string, wr WriteCloserError, rd *bufio.Reader) (interface{}, error) { v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath)) if vs, ok := v.(string); ok { - log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) + log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) if commit, ok := c.commitCache[vs]; ok { - log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) + log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) return commit, nil } id, err := c.repo.ConvertToSHA1(vs) @@ -62,11 +64,11 @@ func (c *LastCommitCache) Get(ref, entryPath string, wr *io.PipeWriter, rd *bufi } // CacheCommit will cache the commit from the gitRepository -func (c *LastCommitCache) CacheCommit(commit *Commit) error { - return c.recursiveCache(commit, &commit.Tree, "", 1) +func (c *LastCommitCache) CacheCommit(ctx context.Context, commit *Commit) error { + return c.recursiveCache(ctx, commit, &commit.Tree, "", 1) } -func (c *LastCommitCache) recursiveCache(commit *Commit, tree *Tree, treePath string, level int) error { +func (c *LastCommitCache) recursiveCache(ctx context.Context, commit *Commit, tree *Tree, treePath string, level int) error { if level == 0 { return nil } @@ -83,22 +85,22 @@ func (c *LastCommitCache) recursiveCache(commit *Commit, tree *Tree, treePath st entryMap[entry.Name()] = entry } - commits, err := GetLastCommitForPaths(commit, treePath, entryPaths) + commits, err := GetLastCommitForPaths(ctx, commit, treePath, entryPaths) if err != nil { return err } - for i, entryCommit := range commits { - entry := entryPaths[i] - if err := c.Put(commit.ID.String(), path.Join(treePath, entryPaths[i]), entryCommit.ID.String()); err != nil { + for entry, entryCommit := range commits { + if err := c.Put(commit.ID.String(), path.Join(treePath, entry), entryCommit.ID.String()); err != nil { return err } - if entryMap[entry].IsDir() { + // entryMap won't contain "" therefore skip this. + if treeEntry := entryMap[entry]; treeEntry != nil && treeEntry.IsDir() { subTree, err := tree.SubTree(entry) if err != nil { return err } - if err := c.recursiveCache(commit, subTree, entry, level-1); err != nil { + if err := c.recursiveCache(ctx, commit, subTree, entry, level-1); err != nil { return err } } diff --git a/modules/git/lfs.go b/modules/git/lfs.go new file mode 100644 index 000000000000..79049c98245a --- /dev/null +++ b/modules/git/lfs.go @@ -0,0 +1,37 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package git + +import ( + "sync" + + logger "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +var once sync.Once + +// CheckLFSVersion will check lfs version, if not satisfied, then disable it. 
+func CheckLFSVersion() { + if setting.LFS.StartServer { + //Disable LFS client hooks if installed for the current OS user + //Needs at least git v2.1.2 + + err := LoadGitVersion() + if err != nil { + logger.Fatal("Error retrieving git version: %v", err) + } + + if CheckGitVersionAtLeast("2.1.2") != nil { + setting.LFS.StartServer = false + logger.Error("LFS server support needs at least Git v2.1.2") + } else { + once.Do(func() { + GlobalCommandArgs = append(GlobalCommandArgs, "-c", "filter.lfs.required=", + "-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=") + }) + } + } +} diff --git a/modules/git/log_name_status.go b/modules/git/log_name_status.go new file mode 100644 index 000000000000..803d614d611a --- /dev/null +++ b/modules/git/log_name_status.go @@ -0,0 +1,398 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package git + +import ( + "bufio" + "bytes" + "context" + "io" + "path" + "sort" + "strings" + + "github.com/djherbis/buffer" + "github.com/djherbis/nio/v3" +) + +// LogNameStatusRepo opens git log --raw in the provided repo and returns a stdin pipe, a stdout reader and cancel function +func LogNameStatusRepo(repository, head, treepath string, paths ...string) (*bufio.Reader, func()) { + // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. + // so let's create a batch stdin and stdout + stdoutReader, stdoutWriter := nio.Pipe(buffer.New(32 * 1024)) + cancel := func() { + _ = stdoutReader.Close() + _ = stdoutWriter.Close() + } + + args := make([]string, 0, 8+len(paths)) + args = append(args, "log", "--name-status", "-c", "--format=commit%x00%H %P%x00", "--parents", "--no-renames", "-t", "-z", head, "--") + if len(paths) < 70 { + if treepath != "" { + args = append(args, treepath) + for _, pth := range paths { + if pth != "" { + args = append(args, path.Join(treepath, pth)) + } + } + } else { + for _, pth := range paths { + if pth != "" { + args = append(args, pth) + } + } + } + } else if treepath != "" { + args = append(args, treepath) + } + + go func() { + stderr := strings.Builder{} + err := NewCommand(args...).RunInDirFullPipeline(repository, stdoutWriter, &stderr, nil) + if err != nil { + _ = stdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) + } else { + _ = stdoutWriter.Close() + } + }() + + // For simplicities sake we'll us a buffered reader to read from the cat-file --batch + bufReader := bufio.NewReaderSize(stdoutReader, 32*1024) + + return bufReader, cancel +} + +// LogNameStatusRepoParser parses a git log raw output from LogRawRepo +type LogNameStatusRepoParser struct { + treepath string + paths []string + next []byte + buffull bool + rd *bufio.Reader + cancel func() +} + +// NewLogNameStatusRepoParser returns a new parser for a git log raw output +func NewLogNameStatusRepoParser(repository, head, treepath string, paths ...string) *LogNameStatusRepoParser { + rd, cancel := LogNameStatusRepo(repository, head, treepath, paths...) 
+ return &LogNameStatusRepoParser{ + treepath: treepath, + paths: paths, + rd: rd, + cancel: cancel, + } +} + +// LogNameStatusCommitData represents a commit artefact from git log raw +type LogNameStatusCommitData struct { + CommitID string + ParentIDs []string + Paths []bool +} + +// Next returns the next LogStatusCommitData +func (g *LogNameStatusRepoParser) Next(treepath string, paths2ids map[string]int, changed []bool, maxpathlen int) (*LogNameStatusCommitData, error) { + var err error + if g.next == nil || len(g.next) == 0 { + g.buffull = false + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err == io.EOF { + return nil, nil + } else { + return nil, err + } + } + } + + ret := LogNameStatusCommitData{} + if bytes.Equal(g.next, []byte("commit\000")) { + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err == io.EOF { + return nil, nil + } else { + return nil, err + } + } + } + + // Our "line" must look like: SP ( SP) * NUL + ret.CommitID = string(g.next[0:40]) + parents := string(g.next[41:]) + if g.buffull { + more, err := g.rd.ReadString('\x00') + if err != nil { + return nil, err + } + parents += more + } + parents = parents[:len(parents)-1] + ret.ParentIDs = strings.Split(parents, " ") + + // now read the next "line" + g.buffull = false + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err != io.EOF { + return nil, err + } + } + + if err == io.EOF || !(g.next[0] == '\n' || g.next[0] == '\000') { + return &ret, nil + } + + // Ok we have some changes. + // This line will look like: NL NUL + // + // Subsequent lines will not have the NL - so drop it here - g.bufffull must also be false at this point too. + if g.next[0] == '\n' { + g.next = g.next[1:] + } else { + g.buffull = false + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err != io.EOF { + return nil, err + } + } + if g.next[0] == '\x00' { + g.buffull = false + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err != io.EOF { + return nil, err + } + } + } + } + + fnameBuf := make([]byte, 4096) + +diffloop: + for { + if err == io.EOF || bytes.Equal(g.next, []byte("commit\000")) { + return &ret, nil + } + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err == io.EOF { + return &ret, nil + } else { + return nil, err + } + } + copy(fnameBuf, g.next) + if len(fnameBuf) < len(g.next) { + fnameBuf = append(fnameBuf, g.next[len(fnameBuf):]...) + } else { + fnameBuf = fnameBuf[:len(g.next)] + } + if err != nil { + if err != bufio.ErrBufferFull { + return nil, err + } + more, err := g.rd.ReadBytes('\x00') + if err != nil { + return nil, err + } + fnameBuf = append(fnameBuf, more...) 
+ } + + // read the next line + g.buffull = false + g.next, err = g.rd.ReadSlice('\x00') + if err != nil { + if err == bufio.ErrBufferFull { + g.buffull = true + } else if err != io.EOF { + return nil, err + } + } + + if treepath != "" { + if !bytes.HasPrefix(fnameBuf, []byte(treepath)) { + fnameBuf = fnameBuf[:cap(fnameBuf)] + continue diffloop + } + } + fnameBuf = fnameBuf[len(treepath) : len(fnameBuf)-1] + if len(fnameBuf) > maxpathlen { + fnameBuf = fnameBuf[:cap(fnameBuf)] + continue diffloop + } + if len(fnameBuf) > 0 { + if len(treepath) > 0 { + if fnameBuf[0] != '/' || bytes.IndexByte(fnameBuf[1:], '/') >= 0 { + fnameBuf = fnameBuf[:cap(fnameBuf)] + continue diffloop + } + fnameBuf = fnameBuf[1:] + } else if bytes.IndexByte(fnameBuf, '/') >= 0 { + fnameBuf = fnameBuf[:cap(fnameBuf)] + continue diffloop + } + } + + idx, ok := paths2ids[string(fnameBuf)] + if !ok { + fnameBuf = fnameBuf[:cap(fnameBuf)] + continue diffloop + } + if ret.Paths == nil { + ret.Paths = changed + } + changed[idx] = true + } +} + +// Close closes the parser +func (g *LogNameStatusRepoParser) Close() { + g.cancel() +} + +// WalkGitLog walks the git log --name-status for the head commit in the provided treepath and files +func WalkGitLog(ctx context.Context, repo *Repository, head *Commit, treepath string, paths ...string) (map[string]string, error) { + tree, err := head.SubTree(treepath) + if err != nil { + return nil, err + } + + entries, err := tree.ListEntries() + if err != nil { + return nil, err + } + + if len(paths) == 0 { + paths = make([]string, 0, len(entries)+1) + paths = append(paths, "") + for _, entry := range entries { + paths = append(paths, entry.Name()) + } + } else { + sort.Strings(paths) + if paths[0] != "" { + paths = append([]string{""}, paths...) + } + // remove duplicates + for i := len(paths) - 1; i > 0; i-- { + if paths[i] == paths[i-1] { + paths = append(paths[:i-1], paths[i:]...) + } + } + } + + path2idx := map[string]int{} + maxpathlen := len(treepath) + + for i := range paths { + path2idx[paths[i]] = i + pthlen := len(paths[i]) + len(treepath) + 1 + if pthlen > maxpathlen { + maxpathlen = pthlen + } + } + + g := NewLogNameStatusRepoParser(repo.Path, head.ID.String(), treepath, paths...) 
+ defer g.Close() + + results := make([]string, len(paths)) + remaining := len(paths) + nextRestart := (len(paths) * 3) / 4 + if nextRestart > 70 { + nextRestart = 70 + } + lastEmptyParent := head.ID.String() + commitSinceLastEmptyParent := uint64(0) + commitSinceNextRestart := uint64(0) + parentRemaining := map[string]bool{} + + changed := make([]bool, len(paths)) + +heaploop: + for { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + current, err := g.Next(treepath, path2idx, changed, maxpathlen) + if err != nil { + g.Close() + return nil, err + } + if current == nil { + break heaploop + } + delete(parentRemaining, current.CommitID) + if current.Paths != nil { + for i, found := range current.Paths { + if !found { + continue + } + changed[i] = false + if results[i] == "" { + results[i] = current.CommitID + delete(path2idx, paths[i]) + remaining-- + if results[0] == "" { + results[0] = current.CommitID + delete(path2idx, "") + remaining-- + } + } + } + } + + if remaining <= 0 { + break heaploop + } + commitSinceLastEmptyParent++ + if len(parentRemaining) == 0 { + lastEmptyParent = current.CommitID + commitSinceLastEmptyParent = 0 + } + if remaining <= nextRestart { + commitSinceNextRestart++ + if 4*commitSinceNextRestart > 3*commitSinceLastEmptyParent { + g.Close() + remainingPaths := make([]string, 0, len(paths)) + for i, pth := range paths { + if results[i] == "" { + remainingPaths = append(remainingPaths, pth) + } + } + g = NewLogNameStatusRepoParser(repo.Path, lastEmptyParent, treepath, remainingPaths...) + parentRemaining = map[string]bool{} + nextRestart = (remaining * 3) / 4 + continue heaploop + } + } + for _, parent := range current.ParentIDs { + parentRemaining[parent] = true + } + } + g.Close() + + resultsMap := map[string]string{} + for i, pth := range paths { + resultsMap[pth] = results[i] + } + + return resultsMap, nil +} diff --git a/modules/git/notes_gogit.go b/modules/git/notes_gogit.go index 173d29cee6b4..534a5d517153 100644 --- a/modules/git/notes_gogit.go +++ b/modules/git/notes_gogit.go @@ -7,13 +7,14 @@ package git import ( + "context" "io/ioutil" "github.com/go-git/go-git/v5/plumbing/object" ) // GetNote retrieves the git-notes data for a given commit. -func GetNote(repo *Repository, commitID string, note *Note) error { +func GetNote(ctx context.Context, repo *Repository, commitID string, note *Note) error { notes, err := repo.GetCommit(NotesRef) if err != nil { return err @@ -62,7 +63,7 @@ func GetNote(repo *Repository, commitID string, note *Note) error { return err } - lastCommits, err := GetLastCommitForPaths(commitNode, "", []string{path}) + lastCommits, err := GetLastCommitForPaths(ctx, commitNode, "", []string{path}) if err != nil { return err } diff --git a/modules/git/notes_nogogit.go b/modules/git/notes_nogogit.go index 1379e5085365..267087a86faf 100644 --- a/modules/git/notes_nogogit.go +++ b/modules/git/notes_nogogit.go @@ -7,12 +7,13 @@ package git import ( + "context" "io/ioutil" "strings" ) // GetNote retrieves the git-notes data for a given commit. 
-func GetNote(repo *Repository, commitID string, note *Note) error { +func GetNote(ctx context.Context, repo *Repository, commitID string, note *Note) error { notes, err := repo.GetCommit(NotesRef) if err != nil { return err @@ -43,11 +44,18 @@ func GetNote(repo *Repository, commitID string, note *Note) error { if err != nil { return err } - defer dataRc.Close() + closed := false + defer func() { + if !closed { + _ = dataRc.Close() + } + }() d, err := ioutil.ReadAll(dataRc) if err != nil { return err } + _ = dataRc.Close() + closed = true note.Message = d treePath := "" @@ -56,11 +64,11 @@ func GetNote(repo *Repository, commitID string, note *Note) error { path = path[idx+1:] } - lastCommits, err := GetLastCommitForPaths(notes, treePath, []string{path}) + lastCommits, err := GetLastCommitForPaths(ctx, notes, treePath, []string{path}) if err != nil { return err } - note.Commit = lastCommits[0] + note.Commit = lastCommits[path] return nil } diff --git a/modules/git/notes_test.go b/modules/git/notes_test.go index b7939e691355..f66a191e6ae2 100644 --- a/modules/git/notes_test.go +++ b/modules/git/notes_test.go @@ -5,6 +5,7 @@ package git import ( + "context" "path/filepath" "testing" @@ -18,7 +19,7 @@ func TestGetNotes(t *testing.T) { defer bareRepo1.Close() note := Note{} - err = GetNote(bareRepo1, "95bb4d39648ee7e325106df01a621c530863a653", ¬e) + err = GetNote(context.Background(), bareRepo1, "95bb4d39648ee7e325106df01a621c530863a653", ¬e) assert.NoError(t, err) assert.Equal(t, []byte("Note contents\n"), note.Message) assert.Equal(t, "Vladimir Panteleev", note.Commit.Author.Name) @@ -31,10 +32,10 @@ func TestGetNestedNotes(t *testing.T) { defer repo.Close() note := Note{} - err = GetNote(repo, "3e668dbfac39cbc80a9ff9c61eb565d944453ba4", ¬e) + err = GetNote(context.Background(), repo, "3e668dbfac39cbc80a9ff9c61eb565d944453ba4", ¬e) assert.NoError(t, err) assert.Equal(t, []byte("Note 2"), note.Message) - err = GetNote(repo, "ba0a96fa63532d6c5087ecef070b0250ed72fa47", ¬e) + err = GetNote(context.Background(), repo, "ba0a96fa63532d6c5087ecef070b0250ed72fa47", ¬e) assert.NoError(t, err) assert.Equal(t, []byte("Note 1"), note.Message) } diff --git a/modules/git/parse_nogogit.go b/modules/git/parse_nogogit.go index e9e93f66fdc1..667111ec4a31 100644 --- a/modules/git/parse_nogogit.go +++ b/modules/git/parse_nogogit.go @@ -7,10 +7,14 @@ package git import ( + "bufio" "bytes" "fmt" + "io" "strconv" "strings" + + "code.gitea.io/gitea/modules/log" ) // ParseTreeEntries parses the output of a `git ls-tree -l` command. 
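The LogNameStatusRepoParser added above in modules/git/log_name_status.go consumes a NUL-delimited stream from `git log --name-status -z`. As a minimal standalone sketch (the repository path "." and revision "HEAD" are placeholders), the same stream can be produced and printed record by record, which makes the `commit%x00%H %P%x00` header framing and the NUL-terminated name-status entries easier to follow:

package main

import (
	"bufio"
	"fmt"
	"os/exec"
)

func main() {
	// Same arguments as LogNameStatusRepo; "." and "HEAD" are placeholders.
	cmd := exec.Command("git", "log", "--name-status", "-c",
		"--format=commit%x00%H %P%x00", "--parents", "--no-renames", "-t", "-z", "HEAD", "--")
	cmd.Dir = "."
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		panic(err)
	}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// Every record ends in NUL, which is why the parser reads with ReadSlice('\x00').
	rd := bufio.NewReaderSize(stdout, 32*1024)
	for {
		tok, err := rd.ReadString('\x00')
		if err != nil {
			break
		}
		fmt.Printf("%q\n", tok)
	}
	_ = cmd.Wait()
}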
@@ -86,3 +90,49 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) { } return entries, nil } + +func catBatchParseTreeEntries(ptree *Tree, rd *bufio.Reader, sz int64) ([]*TreeEntry, error) { + fnameBuf := make([]byte, 4096) + modeBuf := make([]byte, 40) + shaBuf := make([]byte, 40) + entries := make([]*TreeEntry, 0, 10) + +loop: + for sz > 0 { + mode, fname, sha, count, err := ParseTreeLine(rd, modeBuf, fnameBuf, shaBuf) + if err != nil { + if err == io.EOF { + break loop + } + return nil, err + } + sz -= int64(count) + entry := new(TreeEntry) + entry.ptree = ptree + + switch string(mode) { + case "100644": + entry.entryMode = EntryModeBlob + case "100755": + entry.entryMode = EntryModeExec + case "120000": + entry.entryMode = EntryModeSymlink + case "160000": + entry.entryMode = EntryModeCommit + case "40000": + entry.entryMode = EntryModeTree + default: + log.Debug("Unknown mode: %v", string(mode)) + return nil, fmt.Errorf("unknown mode: %v", string(mode)) + } + + entry.ID = MustID(sha) + entry.name = string(fname) + entries = append(entries, entry) + } + if _, err := rd.Discard(1); err != nil { + return entries, err + } + + return entries, nil +} diff --git a/modules/git/parse_nogogit_test.go b/modules/git/parse_nogogit_test.go index a9e7dcc7f8b1..502c38d4e8d9 100644 --- a/modules/git/parse_nogogit_test.go +++ b/modules/git/parse_nogogit_test.go @@ -58,7 +58,7 @@ func TestParseTreeEntries(t *testing.T) { for _, testCase := range testCases { entries, err := ParseTreeEntries([]byte(testCase.Input)) assert.NoError(t, err) - assert.EqualValues(t, len(testCase.Expected), len(entries)) + assert.Len(t, entries, len(testCase.Expected)) for i, entry := range entries { assert.EqualValues(t, testCase.Expected[i].ID, entry.ID) assert.EqualValues(t, testCase.Expected[i].name, entry.name) diff --git a/modules/git/pipeline/lfs_nogogit.go b/modules/git/pipeline/lfs_nogogit.go index 79f7528d3335..d3696fcda219 100644 --- a/modules/git/pipeline/lfs_nogogit.go +++ b/modules/git/pipeline/lfs_nogogit.go @@ -43,8 +43,6 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { basePath := repo.Path - hashStr := hash.String() - // Use rev-list to provide us with all commits in order revListReader, revListWriter := io.Pipe() defer func() { @@ -64,7 +62,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { // Next feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. 
// so let's create a batch stdin and stdout - batchStdinWriter, batchReader, cancel := git.CatFileBatch(repo.Path) + batchStdinWriter, batchReader, cancel := repo.CatFileBatch() defer cancel() // We'll use a scanner for the revList because it's simpler than a bufio.Reader @@ -74,7 +72,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { fnameBuf := make([]byte, 4096) modeBuf := make([]byte, 40) - workingShaBuf := make([]byte, 40) + workingShaBuf := make([]byte, 20) for scan.Scan() { // Get the next commit ID @@ -118,6 +116,9 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { if err != nil { return nil, err } + if _, err := batchReader.Discard(1); err != nil { + return nil, err + } _, err := batchStdinWriter.Write([]byte(curCommit.Tree.ID.String() + "\n")) if err != nil { @@ -132,8 +133,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { return nil, err } n += int64(count) - sha := git.To40ByteSHA(sha20byte) - if bytes.Equal(sha, []byte(hashStr)) { + if bytes.Equal(sha20byte, hash[:]) { result := LFSResult{ Name: curPath + string(fname), SHA: curCommit.ID.String(), @@ -143,10 +143,15 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { } resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result } else if string(mode) == git.EntryModeTree.String() { - trees = append(trees, sha) + sha40Byte := make([]byte, 40) + git.To40ByteSHA(sha20byte, sha40Byte) + trees = append(trees, sha40Byte) paths = append(paths, curPath+string(fname)+"/") } } + if _, err := batchReader.Discard(1); err != nil { + return nil, err + } if len(trees) > 0 { _, err := batchStdinWriter.Write(trees[len(trees)-1]) if err != nil { diff --git a/modules/git/remote.go b/modules/git/remote.go new file mode 100644 index 000000000000..7ba2b35a5ed3 --- /dev/null +++ b/modules/git/remote.go @@ -0,0 +1,31 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package git + +import "net/url" + +// GetRemoteAddress returns the url of a specific remote of the repository. +func GetRemoteAddress(repoPath, remoteName string) (*url.URL, error) { + err := LoadGitVersion() + if err != nil { + return nil, err + } + var cmd *Command + if CheckGitVersionAtLeast("2.7") == nil { + cmd = NewCommand("remote", "get-url", remoteName) + } else { + cmd = NewCommand("config", "--get", "remote."+remoteName+".url") + } + + result, err := cmd.RunInDir(repoPath) + if err != nil { + return nil, err + } + + if len(result) > 0 { + result = result[:len(result)-1] + } + return url.Parse(result) +} diff --git a/modules/git/repo.go b/modules/git/repo.go index 515899ab0498..43f329f4487d 100644 --- a/modules/git/repo.go +++ b/modules/git/repo.go @@ -182,10 +182,12 @@ func Pull(repoPath string, opts PullRemoteOptions) error { // PushOptions options when push to remote type PushOptions struct { - Remote string - Branch string - Force bool - Env []string + Remote string + Branch string + Force bool + Mirror bool + Env []string + Timeout time.Duration } // Push pushs local commits to given remote branch. 
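The new GetRemoteAddress helper above prefers `git remote get-url` when git is at least 2.7 and otherwise falls back to `git config --get remote.<name>.url`. A standalone sketch of that same plumbing, with the repository path and remote name as placeholders, looks roughly like this:

package main

import (
	"fmt"
	"net/url"
	"os/exec"
	"strings"
)

func main() {
	repoPath := "/path/to/repo.git" // placeholder
	remote := "origin"              // placeholder

	// Preferred form on modern git; the second command mirrors the pre-2.7 fallback.
	cmd := exec.Command("git", "remote", "get-url", remote)
	cmd.Dir = repoPath
	out, err := cmd.Output()
	if err != nil {
		cmd = exec.Command("git", "config", "--get", "remote."+remote+".url")
		cmd.Dir = repoPath
		if out, err = cmd.Output(); err != nil {
			panic(err)
		}
	}

	raw := strings.TrimSpace(string(out))
	u, err := url.Parse(raw) // note: scp-style URLs (git@host:path) do not parse cleanly here
	if err != nil {
		panic(err)
	}
	fmt.Println(u.Scheme, u.Host, u.Path)
}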
@@ -194,10 +196,20 @@ func Push(repoPath string, opts PushOptions) error { if opts.Force { cmd.AddArguments("-f") } - cmd.AddArguments("--", opts.Remote, opts.Branch) + if opts.Mirror { + cmd.AddArguments("--mirror") + } + cmd.AddArguments("--", opts.Remote) + if len(opts.Branch) > 0 { + cmd.AddArguments(opts.Branch) + } var outbuf, errbuf strings.Builder - err := cmd.RunInDirTimeoutEnvPipeline(opts.Env, -1, repoPath, &outbuf, &errbuf) + if opts.Timeout == 0 { + opts.Timeout = -1 + } + + err := cmd.RunInDirTimeoutEnvPipeline(opts.Env, opts.Timeout, repoPath, &outbuf, &errbuf) if err != nil { if strings.Contains(errbuf.String(), "non-fast-forward") { return &ErrPushOutOfDate{ @@ -213,6 +225,13 @@ func Push(repoPath string, opts PushOptions) error { } err.GenerateMessage() return err + } else if strings.Contains(errbuf.String(), "matches more than one") { + err := &ErrMoreThanOne{ + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + return err } } diff --git a/modules/git/commit_archive.go b/modules/git/repo_archive.go similarity index 60% rename from modules/git/commit_archive.go rename to modules/git/repo_archive.go index d075ba09115f..07003aa6b2c1 100644 --- a/modules/git/commit_archive.go +++ b/modules/git/repo_archive.go @@ -8,6 +8,7 @@ package git import ( "context" "fmt" + "io" "path/filepath" "strings" ) @@ -33,32 +34,28 @@ func (a ArchiveType) String() string { return "unknown" } -// CreateArchiveOpts represents options for creating an archive -type CreateArchiveOpts struct { - Format ArchiveType - Prefix bool -} - // CreateArchive create archive content to the target path -func (c *Commit) CreateArchive(ctx context.Context, target string, opts CreateArchiveOpts) error { - if opts.Format.String() == "unknown" { - return fmt.Errorf("unknown format: %v", opts.Format) +func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, target io.Writer, usePrefix bool, commitID string) error { + if format.String() == "unknown" { + return fmt.Errorf("unknown format: %v", format) } args := []string{ "archive", } - if opts.Prefix { - args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(c.repo.Path, ".git"))+"/") + if usePrefix { + args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/") } args = append(args, - "--format="+opts.Format.String(), - "-o", - target, - c.ID.String(), + "--format="+format.String(), + commitID, ) - _, err := NewCommandContext(ctx, args...).RunInDir(c.repo.Path) - return err + var stderr strings.Builder + err := NewCommandContext(ctx, args...).RunInDirPipeline(repo.Path, target, &stderr) + if err != nil { + return ConcatenateError(err, stderr.String()) + } + return nil } diff --git a/modules/git/repo_base_gogit.go b/modules/git/repo_base_gogit.go index 19a3f84571fb..6186824c0b9f 100644 --- a/modules/git/repo_base_gogit.go +++ b/modules/git/repo_base_gogit.go @@ -12,6 +12,8 @@ import ( "path/filepath" gitealog "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "github.com/go-git/go-billy/v5/osfs" gogit "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing/cache" @@ -46,7 +48,7 @@ func OpenRepository(repoPath string) (*Repository, error) { return nil, err } } - storage := filesystem.NewStorageWithOptions(fs, cache.NewObjectLRUDefault(), filesystem.Options{KeepDescriptors: true}) + storage := filesystem.NewStorageWithOptions(fs, cache.NewObjectLRUDefault(), filesystem.Options{KeepDescriptors: true, LargeObjectThreshold: setting.Git.LargeObjectThreshold}) 
gogitRepo, err := gogit.Open(storage, fs) if err != nil { return nil, err diff --git a/modules/git/repo_base_nogogit.go b/modules/git/repo_base_nogogit.go index e05219a4e705..1675967d181b 100644 --- a/modules/git/repo_base_nogogit.go +++ b/modules/git/repo_base_nogogit.go @@ -8,8 +8,12 @@ package git import ( + "bufio" + "context" "errors" "path/filepath" + + "code.gitea.io/gitea/modules/log" ) // Repository represents a Git repository. @@ -19,6 +23,14 @@ type Repository struct { tagCache *ObjectCache gpgSettings *GPGSettings + + batchCancel context.CancelFunc + batchReader *bufio.Reader + batchWriter WriteCloserError + + checkCancel context.CancelFunc + checkReader *bufio.Reader + checkWriter WriteCloserError } // OpenRepository opens the repository at the given path. @@ -29,12 +41,51 @@ func OpenRepository(repoPath string) (*Repository, error) { } else if !isDir(repoPath) { return nil, errors.New("no such file or directory") } - return &Repository{ + + repo := &Repository{ Path: repoPath, tagCache: newObjectCache(), - }, nil + } + + repo.batchWriter, repo.batchReader, repo.batchCancel = CatFileBatch(repoPath) + repo.checkWriter, repo.checkReader, repo.checkCancel = CatFileBatchCheck(repo.Path) + + return repo, nil +} + +// CatFileBatch obtains a CatFileBatch for this repository +func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func()) { + if repo.batchCancel == nil || repo.batchReader.Buffered() > 0 { + log.Debug("Opening temporary cat file batch for: %s", repo.Path) + return CatFileBatch(repo.Path) + } + return repo.batchWriter, repo.batchReader, func() {} +} + +// CatFileBatchCheck obtains a CatFileBatchCheck for this repository +func (repo *Repository) CatFileBatchCheck() (WriteCloserError, *bufio.Reader, func()) { + if repo.checkCancel == nil || repo.checkReader.Buffered() > 0 { + log.Debug("Opening temporary cat file batch-check: %s", repo.Path) + return CatFileBatchCheck(repo.Path) + } + return repo.checkWriter, repo.checkReader, func() {} } // Close this repository, in particular close the underlying gogitStorage if this is not nil func (repo *Repository) Close() { + if repo == nil { + return + } + if repo.batchCancel != nil { + repo.batchCancel() + repo.batchReader = nil + repo.batchWriter = nil + repo.batchCancel = nil + } + if repo.checkCancel != nil { + repo.checkCancel() + repo.checkCancel = nil + repo.checkReader = nil + repo.checkWriter = nil + } } diff --git a/modules/git/repo_blob_nogogit.go b/modules/git/repo_blob_nogogit.go index 9959420df4e9..afb08d29cb98 100644 --- a/modules/git/repo_blob_nogogit.go +++ b/modules/git/repo_blob_nogogit.go @@ -11,7 +11,7 @@ func (repo *Repository) getBlob(id SHA1) (*Blob, error) { return nil, ErrNotExist{id.String(), ""} } return &Blob{ - ID: id, - repoPath: repo.Path, + ID: id, + repo: repo, }, nil } diff --git a/modules/git/repo_blob_test.go b/modules/git/repo_blob_test.go index 52a124db2a75..ccf418b3059f 100644 --- a/modules/git/repo_blob_test.go +++ b/modules/git/repo_blob_test.go @@ -33,9 +33,9 @@ func TestRepository_GetBlob_Found(t *testing.T) { dataReader, err := blob.DataAsync() assert.NoError(t, err) - defer dataReader.Close() data, err := ioutil.ReadAll(dataReader) + assert.NoError(t, dataReader.Close()) assert.NoError(t, err) assert.Equal(t, testCase.Data, data) } diff --git a/modules/git/repo_branch_gogit.go b/modules/git/repo_branch_gogit.go index b00253f6ffd6..e8386b2dbd98 100644 --- a/modules/git/repo_branch_gogit.go +++ b/modules/git/repo_branch_gogit.go @@ -13,6 +13,30 @@ import ( 
"github.com/go-git/go-git/v5/plumbing" ) +// IsObjectExist returns true if given reference exists in the repository. +func (repo *Repository) IsObjectExist(name string) bool { + if name == "" { + return false + } + + _, err := repo.gogitRepo.ResolveRevision(plumbing.Revision(name)) + + return err == nil +} + +// IsReferenceExist returns true if given reference exists in the repository. +func (repo *Repository) IsReferenceExist(name string) bool { + if name == "" { + return false + } + + reference, err := repo.gogitRepo.Reference(plumbing.ReferenceName(name), true) + if err != nil { + return false + } + return reference.Type() != plumbing.InvalidReference +} + // IsBranchExist returns true if given branch exists in current repository. func (repo *Repository) IsBranchExist(name string) bool { if name == "" { diff --git a/modules/git/repo_branch_nogogit.go b/modules/git/repo_branch_nogogit.go index 0628a572859c..7d10b8ba0fae 100644 --- a/modules/git/repo_branch_nogogit.go +++ b/modules/git/repo_branch_nogogit.go @@ -9,16 +9,54 @@ package git import ( "bufio" + "bytes" "io" "strings" + + "code.gitea.io/gitea/modules/log" ) +// IsObjectExist returns true if given reference exists in the repository. +func (repo *Repository) IsObjectExist(name string) bool { + if name == "" { + return false + } + + wr, rd, cancel := repo.CatFileBatchCheck() + defer cancel() + _, err := wr.Write([]byte(name + "\n")) + if err != nil { + log.Debug("Error writing to CatFileBatchCheck %v", err) + return false + } + sha, _, _, err := ReadBatchLine(rd) + return err == nil && bytes.HasPrefix(sha, []byte(strings.TrimSpace(name))) +} + +// IsReferenceExist returns true if given reference exists in the repository. +func (repo *Repository) IsReferenceExist(name string) bool { + if name == "" { + return false + } + + wr, rd, cancel := repo.CatFileBatchCheck() + defer cancel() + _, err := wr.Write([]byte(name + "\n")) + if err != nil { + log.Debug("Error writing to CatFileBatchCheck %v", err) + return false + } + _, _, _, err = ReadBatchLine(rd) + return err == nil +} + // IsBranchExist returns true if given branch exists in current repository. func (repo *Repository) IsBranchExist(name string) bool { if name == "" { return false } - return IsReferenceExist(repo.Path, BranchPrefix+name) + + return repo.IsReferenceExist(BranchPrefix + name) } // GetBranches returns branches from the repository, skipping skip initial branches and diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go index 5e2db34fd18e..16ee5b2fd62f 100644 --- a/modules/git/repo_commit.go +++ b/modules/git/repo_commit.go @@ -12,6 +12,8 @@ import ( "io/ioutil" "strconv" "strings" + + "code.gitea.io/gitea/modules/setting" ) // GetBranchCommitID returns last commit ID string of given branch. 
@@ -24,27 +26,6 @@ func (repo *Repository) GetTagCommitID(name string) (string, error) { return repo.GetRefCommitID(TagPrefix + name) } -// ConvertToSHA1 returns a Hash object from a potential ID string -func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) { - if len(commitID) == 40 { - sha1, err := NewIDFromString(commitID) - if err == nil { - return sha1, nil - } - } - - actualCommitID, err := NewCommand("rev-parse", "--verify", commitID).RunInDir(repo.Path) - if err != nil { - if strings.Contains(err.Error(), "unknown revision or path") || - strings.Contains(err.Error(), "fatal: Needed a single revision") { - return SHA1{}, ErrNotExist{commitID, ""} - } - return SHA1{}, err - } - - return NewIDFromString(actualCommitID) -} - // GetCommit returns commit object of by ID string. func (repo *Repository) GetCommit(commitID string) (*Commit, error) { id, err := repo.ConvertToSHA1(commitID) @@ -106,12 +87,6 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) { return commits.Front().Value.(*Commit), nil } -// CommitsRangeSize the default commits range size -var CommitsRangeSize = 50 - -// BranchesRangeSize the default branches range size -var BranchesRangeSize = 20 - func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) (*list.List, error) { stdout, err := NewCommand("log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize), "--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunInDirBytes(repo.Path) @@ -135,7 +110,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) (*list } } - // add commiters if present in search query + // add committers if present in search query if len(opts.Committers) > 0 { for _, v := range opts.Committers { args = append(args, "--committer="+v) @@ -175,7 +150,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) (*list stdout = append(stdout, '\n') } - // if there are any keywords (ie not commiter:, author:, time:) + // if there are any keywords (ie not committer:, author:, time:) // then let's iterate over them if len(opts.Keywords) > 0 { for _, v := range opts.Keywords { @@ -220,14 +195,14 @@ func (repo *Repository) FileChangedBetweenCommits(filename, id1, id2 string) (bo return len(strings.TrimSpace(string(stdout))) > 0, nil } -// FileCommitsCount return the number of files at a revison +// FileCommitsCount return the number of files at a revision func (repo *Repository) FileCommitsCount(revision, file string) (int64, error) { return CommitsCountFiles(repo.Path, []string{revision}, []string{file}) } -// CommitsByFileAndRange return the commits according revison file and the page +// CommitsByFileAndRange return the commits according revision file and the page func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (*list.List, error) { - skip := (page - 1) * CommitsRangeSize + skip := (page - 1) * setting.Git.CommitsRangeSize stdoutReader, stdoutWriter := io.Pipe() defer func() { @@ -237,7 +212,7 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) ( go func() { stderr := strings.Builder{} err := NewCommand("log", revision, "--follow", - "--max-count="+strconv.Itoa(CommitsRangeSize*page), + "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize*page), prettyLogFormat, "--", file). 
RunInDirPipeline(repo.Path, stdoutWriter, &stderr) if err != nil { @@ -265,10 +240,10 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) ( return repo.parsePrettyFormatLogToList(stdout) } -// CommitsByFileAndRangeNoFollow return the commits according revison file and the page +// CommitsByFileAndRangeNoFollow return the commits according revision file and the page func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, page int) (*list.List, error) { stdout, err := NewCommand("log", revision, "--skip="+strconv.Itoa((page-1)*50), - "--max-count="+strconv.Itoa(CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path) + "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path) if err != nil { return nil, err } @@ -289,14 +264,15 @@ func (repo *Repository) FilesCountBetween(startCommitID, endCommitID string) (in return len(strings.Split(stdout, "\n")) - 1, nil } -// CommitsBetween returns a list that contains commits between [last, before). +// CommitsBetween returns a list that contains commits between [before, last). +// If before is detached (removed by reset + push) it is not included. func (repo *Repository) CommitsBetween(last *Commit, before *Commit) (*list.List, error) { var stdout []byte var err error if before == nil { stdout, err = NewCommand("rev-list", last.ID.String()).RunInDirBytes(repo.Path) } else { - stdout, err = NewCommand("rev-list", before.ID.String()+"..."+last.ID.String()).RunInDirBytes(repo.Path) + stdout, err = NewCommand("rev-list", before.ID.String()+".."+last.ID.String()).RunInDirBytes(repo.Path) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list before last so let's try that... @@ -309,14 +285,14 @@ func (repo *Repository) CommitsBetween(last *Commit, before *Commit) (*list.List return repo.parsePrettyFormatLogToList(bytes.TrimSpace(stdout)) } -// CommitsBetweenLimit returns a list that contains at most limit commits skipping the first skip commits between [last, before) +// CommitsBetweenLimit returns a list that contains at most limit commits skipping the first skip commits between [before, last) func (repo *Repository) CommitsBetweenLimit(last *Commit, before *Commit, limit, skip int) (*list.List, error) { var stdout []byte var err error if before == nil { stdout, err = NewCommand("rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), last.ID.String()).RunInDirBytes(repo.Path) } else { - stdout, err = NewCommand("rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String()+"..."+last.ID.String()).RunInDirBytes(repo.Path) + stdout, err = NewCommand("rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String()+".."+last.ID.String()).RunInDirBytes(repo.Path) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list --max-count n before last so let's try that... 
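The switch above from `before...last` to `before..last` matters because the three-dot form asks rev-list for the symmetric difference, while the two-dot form lists only commits reachable from `last` and not from `before`, which is what CommitsBetween is documented to return. A quick standalone way to compare the two forms, with the repository path and refs as placeholders:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// count runs `git rev-list --count <range>` in dir and returns the reported number.
func count(dir, rangeSpec string) string {
	cmd := exec.Command("git", "rev-list", "--count", rangeSpec)
	cmd.Dir = dir
	out, err := cmd.Output()
	if err != nil {
		return "error: " + err.Error()
	}
	return strings.TrimSpace(string(out))
}

func main() {
	dir := "/path/to/repo.git" // placeholder
	// Placeholder refs; any commit-ish pair will do.
	fmt.Println("two-dot   (only in HEAD):         ", count(dir, "origin/main..HEAD"))
	fmt.Println("three-dot (symmetric difference): ", count(dir, "origin/main...HEAD"))
}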
@@ -347,7 +323,7 @@ func (repo *Repository) CommitsBetweenIDs(last, before string) (*list.List, erro // CommitsCountBetween return numbers of commits between two commits func (repo *Repository) CommitsCountBetween(start, end string) (int64, error) { - count, err := CommitsCountFiles(repo.Path, []string{start + "..." + end}, []string{}) + count, err := CommitsCountFiles(repo.Path, []string{start + ".." + end}, []string{}) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list before last so let's try that... diff --git a/modules/git/repo_commit_gogit.go b/modules/git/repo_commit_gogit.go index 48b0cfe19d06..2f9b1c4206f4 100644 --- a/modules/git/repo_commit_gogit.go +++ b/modules/git/repo_commit_gogit.go @@ -30,6 +30,27 @@ func (repo *Repository) GetRefCommitID(name string) (string, error) { return ref.Hash().String(), nil } +// ConvertToSHA1 returns a Hash object from a potential ID string +func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) { + if len(commitID) == 40 { + sha1, err := NewIDFromString(commitID) + if err == nil { + return sha1, nil + } + } + + actualCommitID, err := NewCommand("rev-parse", "--verify", commitID).RunInDir(repo.Path) + if err != nil { + if strings.Contains(err.Error(), "unknown revision or path") || + strings.Contains(err.Error(), "fatal: Needed a single revision") { + return SHA1{}, ErrNotExist{commitID, ""} + } + return SHA1{}, err + } + + return NewIDFromString(actualCommitID) +} + // IsCommitExist returns true if given commit exists in current repository. func (repo *Repository) IsCommitExist(name string) bool { hash := plumbing.NewHash(name) diff --git a/modules/git/repo_commit_nogogit.go b/modules/git/repo_commit_nogogit.go index df56b26b0159..afd5166f1d54 100644 --- a/modules/git/repo_commit_nogogit.go +++ b/modules/git/repo_commit_nogogit.go @@ -11,9 +11,9 @@ import ( "errors" "io" "io/ioutil" - "os" - "path/filepath" "strings" + + "code.gitea.io/gitea/modules/log" ) // ResolveReference resolves a name to a reference @@ -35,27 +35,15 @@ func (repo *Repository) ResolveReference(name string) (string, error) { // GetRefCommitID returns the last commit ID string of given reference (branch or tag). func (repo *Repository) GetRefCommitID(name string) (string, error) { - if strings.HasPrefix(name, "refs/") { - // We're gonna try just reading the ref file as this is likely to be quicker than other options - fileInfo, err := os.Lstat(filepath.Join(repo.Path, name)) - if err == nil && fileInfo.Mode().IsRegular() && fileInfo.Size() == 41 { - ref, err := ioutil.ReadFile(filepath.Join(repo.Path, name)) - - if err == nil && SHAPattern.Match(ref[:40]) && ref[40] == '\n' { - return string(ref[:40]), nil - } - } - } - - stdout, err := NewCommand("show-ref", "--verify", "--hash", name).RunInDir(repo.Path) - if err != nil { - if strings.Contains(err.Error(), "not a valid ref") { - return "", ErrNotExist{name, ""} - } - return "", err + wr, rd, cancel := repo.CatFileBatchCheck() + defer cancel() + _, _ = wr.Write([]byte(name + "\n")) + shaBs, _, _, err := ReadBatchLine(rd) + if IsErrNotExist(err) { + return "", ErrNotExist{name, ""} } - return strings.TrimSpace(stdout), nil + return string(shaBs), nil } // IsCommitExist returns true if given commit exists in current repository. 
@@ -65,31 +53,18 @@ func (repo *Repository) IsCommitExist(name string) bool { } func (repo *Repository) getCommit(id SHA1) (*Commit, error) { - stdoutReader, stdoutWriter := io.Pipe() - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - - go func() { - stderr := strings.Builder{} - err := NewCommand("cat-file", "--batch").RunInDirFullPipeline(repo.Path, stdoutWriter, &stderr, strings.NewReader(id.String()+"\n")) - if err != nil { - _ = stdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) - } else { - _ = stdoutWriter.Close() - } - }() + wr, rd, cancel := repo.CatFileBatch() + defer cancel() - bufReader := bufio.NewReader(stdoutReader) + _, _ = wr.Write([]byte(id.String() + "\n")) - return repo.getCommitFromBatchReader(bufReader, id) + return repo.getCommitFromBatchReader(rd, id) } -func (repo *Repository) getCommitFromBatchReader(bufReader *bufio.Reader, id SHA1) (*Commit, error) { - _, typ, size, err := ReadBatchLine(bufReader) +func (repo *Repository) getCommitFromBatchReader(rd *bufio.Reader, id SHA1) (*Commit, error) { + _, typ, size, err := ReadBatchLine(rd) if err != nil { - if errors.Is(err, io.EOF) { + if errors.Is(err, io.EOF) || IsErrNotExist(err) { return nil, ErrNotExist{ID: id.String()} } return nil, err @@ -101,7 +76,11 @@ func (repo *Repository) getCommitFromBatchReader(bufReader *bufio.Reader, id SHA case "tag": // then we need to parse the tag // and load the commit - data, err := ioutil.ReadAll(io.LimitReader(bufReader, size)) + data, err := ioutil.ReadAll(io.LimitReader(rd, size)) + if err != nil { + return nil, err + } + _, err = rd.Discard(1) if err != nil { return nil, err } @@ -122,11 +101,50 @@ func (repo *Repository) getCommitFromBatchReader(bufReader *bufio.Reader, id SHA return commit, nil case "commit": - return CommitFromReader(repo, id, io.LimitReader(bufReader, size)) + commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size)) + if err != nil { + return nil, err + } + _, err = rd.Discard(1) + if err != nil { + return nil, err + } + + return commit, nil default: - log("Unknown typ: %s", typ) + log.Debug("Unknown typ: %s", typ) + _, err = rd.Discard(int(size) + 1) + if err != nil { + return nil, err + } return nil, ErrNotExist{ ID: id.String(), } } } + +// ConvertToSHA1 returns a Hash object from a potential ID string +func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) { + if len(commitID) == 40 && SHAPattern.MatchString(commitID) { + sha1, err := NewIDFromString(commitID) + if err == nil { + return sha1, nil + } + } + + wr, rd, cancel := repo.CatFileBatchCheck() + defer cancel() + _, err := wr.Write([]byte(commitID + "\n")) + if err != nil { + return SHA1{}, err + } + sha, _, _, err := ReadBatchLine(rd) + if err != nil { + if IsErrNotExist(err) { + return SHA1{}, ErrNotExist{commitID, ""} + } + return SHA1{}, err + } + + return MustIDFromString(string(sha)), nil +} diff --git a/modules/git/repo_commit_test.go b/modules/git/repo_commit_test.go index 3eedaa6b6e9b..a6c27ea4d55b 100644 --- a/modules/git/repo_commit_test.go +++ b/modules/git/repo_commit_test.go @@ -72,9 +72,31 @@ func TestIsCommitInBranch(t *testing.T) { result, err := bareRepo1.IsCommitInBranch("2839944139e0de9737a044f78b0e4b40d989a9e3", "branch1") assert.NoError(t, err) - assert.Equal(t, true, result) + assert.True(t, result) result, err = bareRepo1.IsCommitInBranch("2839944139e0de9737a044f78b0e4b40d989a9e3", "branch2") assert.NoError(t, err) - assert.Equal(t, false, result) + assert.False(t, result) +} + +func 
TestRepository_CommitsBetweenIDs(t *testing.T) { + bareRepo1Path := filepath.Join(testReposDir, "repo4_commitsbetween") + bareRepo1, err := OpenRepository(bareRepo1Path) + assert.NoError(t, err) + defer bareRepo1.Close() + + cases := []struct { + OldID string + NewID string + ExpectedCommits int + }{ + {"fdc1b615bdcff0f0658b216df0c9209e5ecb7c78", "78a445db1eac62fe15e624e1137965969addf344", 1}, //com1 -> com2 + {"78a445db1eac62fe15e624e1137965969addf344", "fdc1b615bdcff0f0658b216df0c9209e5ecb7c78", 0}, //reset HEAD~, com2 -> com1 + {"78a445db1eac62fe15e624e1137965969addf344", "a78e5638b66ccfe7e1b4689d3d5684e42c97d7ca", 1}, //com2 -> com2_new + } + for i, c := range cases { + commits, err := bareRepo1.CommitsBetweenIDs(c.NewID, c.OldID) + assert.NoError(t, err) + assert.Equal(t, c.ExpectedCommits, commits.Len(), "case %d", i) + } } diff --git a/modules/git/repo_language_stats_nogogit.go b/modules/git/repo_language_stats_nogogit.go index 3f197f8d74e9..1684f21d1675 100644 --- a/modules/git/repo_language_stats_nogogit.go +++ b/modules/git/repo_language_stats_nogogit.go @@ -13,6 +13,7 @@ import ( "math" "code.gitea.io/gitea/modules/analyze" + "code.gitea.io/gitea/modules/log" "github.com/go-enry/go-enry/v2" ) @@ -21,15 +22,11 @@ import ( func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, error) { // We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary. // so let's create a batch stdin and stdout - batchStdinWriter, batchReader, cancel := CatFileBatch(repo.Path) + batchStdinWriter, batchReader, cancel := repo.CatFileBatch() defer cancel() writeID := func(id string) error { - _, err := batchStdinWriter.Write([]byte(id)) - if err != nil { - return err - } - _, err = batchStdinWriter.Write([]byte{'\n'}) + _, err := batchStdinWriter.Write([]byte(id + "\n")) return err } @@ -38,19 +35,22 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err } shaBytes, typ, size, err := ReadBatchLine(batchReader) if typ != "commit" { - log("Unable to get commit for: %s. Err: %v", commitID, err) + log.Debug("Unable to get commit for: %s. Err: %v", commitID, err) return nil, ErrNotExist{commitID, ""} } sha, err := NewIDFromString(string(shaBytes)) if err != nil { - log("Unable to get commit for: %s. Err: %v", commitID, err) + log.Debug("Unable to get commit for: %s. Err: %v", commitID, err) return nil, ErrNotExist{commitID, ""} } commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size)) if err != nil { - log("Unable to get commit for: %s. Err: %v", commitID, err) + log.Debug("Unable to get commit for: %s. 
Err: %v", commitID, err) + return nil, err + } + if _, err = batchReader.Discard(1); err != nil { return nil, err } @@ -80,15 +80,15 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err } _, _, size, err := ReadBatchLine(batchReader) if err != nil { - log("Error reading blob: %s Err: %v", f.ID.String(), err) + log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err) return nil, err } sizeToRead := size - discard := int64(0) + discard := int64(1) if size > fileSizeLimit { sizeToRead = fileSizeLimit - discard = size - fileSizeLimit + discard = size - fileSizeLimit + 1 } _, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead)) diff --git a/modules/git/repo_stats.go b/modules/git/repo_stats.go index cb2a2bcf5102..aca5ab21ccb2 100644 --- a/modules/git/repo_stats.go +++ b/modules/git/repo_stats.go @@ -33,7 +33,7 @@ type CodeActivityAuthor struct { Commits int64 } -// GetCodeActivityStats returns code statistics for acitivity page +// GetCodeActivityStats returns code statistics for activity page func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) (*CodeActivityStats, error) { stats := &CodeActivityStats{} diff --git a/modules/git/repo_tag.go b/modules/git/repo_tag.go index 59ab7020964d..d91c3ca97973 100644 --- a/modules/git/repo_tag.go +++ b/modules/git/repo_tag.go @@ -8,6 +8,8 @@ package git import ( "fmt" "strings" + + "code.gitea.io/gitea/modules/log" ) // TagPrefix tags prefix path on the repository @@ -33,7 +35,7 @@ func (repo *Repository) CreateAnnotatedTag(name, message, revision string) error func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) { t, ok := repo.tagCache.Get(tagID.String()) if ok { - log("Hit cache: %s", tagID) + log.Debug("Hit cache: %s", tagID) tagClone := *t.(*Tag) tagClone.Name = name // This is necessary because lightweight tags may have same id return &tagClone, nil diff --git a/modules/git/repo_tag_nogogit.go b/modules/git/repo_tag_nogogit.go index b3fa5d6dc407..a9e122aeaa7d 100644 --- a/modules/git/repo_tag_nogogit.go +++ b/modules/git/repo_tag_nogogit.go @@ -9,7 +9,11 @@ package git // IsTagExist returns true if given tag exists in the repository. func (repo *Repository) IsTagExist(name string) bool { - return IsReferenceExist(repo.Path, TagPrefix+name) + if name == "" { + return false + } + + return repo.IsReferenceExist(TagPrefix + name) } // GetTags returns all tags of the repository. 
diff --git a/modules/git/repo_test.go b/modules/git/repo_test.go index 0b6986764c05..9517783e6bbf 100644 --- a/modules/git/repo_test.go +++ b/modules/git/repo_test.go @@ -7,23 +7,18 @@ package git import ( "path/filepath" "testing" - "time" "github.com/stretchr/testify/assert" ) func TestGetLatestCommitTime(t *testing.T) { - lct, err := GetLatestCommitTime(".") + bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") + lct, err := GetLatestCommitTime(bareRepo1Path) assert.NoError(t, err) - // Time is in the past - now := time.Now() - assert.True(t, lct.Unix() < now.Unix(), "%d not smaller than %d", lct, now) - // Time is after Mon Oct 23 03:52:09 2017 +0300 + // Time is Sun Jul 21 22:43:13 2019 +0200 // which is the time of commit - // d47b98c44c9a6472e44ab80efe65235e11c6da2a - refTime, err := time.Parse("Mon Jan 02 15:04:05 2006 -0700", "Mon Oct 23 03:52:09 2017 +0300") - assert.NoError(t, err) - assert.True(t, lct.Unix() > refTime.Unix(), "%d not greater than %d", lct, refTime) + // feaf4ba6bc635fec442f46ddd4512416ec43c2c2 (refs/heads/master) + assert.EqualValues(t, 1563741793, lct.Unix()) } func TestRepoIsEmpty(t *testing.T) { diff --git a/modules/git/repo_tree_nogogit.go b/modules/git/repo_tree_nogogit.go index 867c3fa5aae0..967f8aea3f44 100644 --- a/modules/git/repo_tree_nogogit.go +++ b/modules/git/repo_tree_nogogit.go @@ -7,33 +7,18 @@ package git import ( - "bufio" - "fmt" "io" "io/ioutil" - "strings" ) func (repo *Repository) getTree(id SHA1) (*Tree, error) { - stdoutReader, stdoutWriter := io.Pipe() - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() + wr, rd, cancel := repo.CatFileBatch() + defer cancel() - go func() { - stderr := &strings.Builder{} - err := NewCommand("cat-file", "--batch").RunInDirFullPipeline(repo.Path, stdoutWriter, stderr, strings.NewReader(id.String()+"\n")) - if err != nil { - _ = stdoutWriter.CloseWithError(ConcatenateError(err, stderr.String())) - } else { - _ = stdoutWriter.Close() - } - }() + _, _ = wr.Write([]byte(id.String() + "\n")) - bufReader := bufio.NewReader(stdoutReader) // ignore the SHA - _, typ, size, err := ReadBatchLine(bufReader) + _, typ, size, err := ReadBatchLine(rd) if err != nil { return nil, err } @@ -41,7 +26,7 @@ func (repo *Repository) getTree(id SHA1) (*Tree, error) { switch typ { case "tag": resolvedID := id - data, err := ioutil.ReadAll(io.LimitReader(bufReader, size)) + data, err := ioutil.ReadAll(io.LimitReader(rd, size)) if err != nil { return nil, err } @@ -54,24 +39,27 @@ func (repo *Repository) getTree(id SHA1) (*Tree, error) { return nil, err } commit.Tree.ResolvedID = resolvedID - log("tag.commit.Tree: %s %v", commit.Tree.ID.String(), commit.Tree.repo) return &commit.Tree, nil case "commit": - commit, err := CommitFromReader(repo, id, io.LimitReader(bufReader, size)) + commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size)) if err != nil { - _ = stdoutReader.CloseWithError(err) + return nil, err + } + if _, err := rd.Discard(1); err != nil { return nil, err } commit.Tree.ResolvedID = commit.ID - log("commit.Tree: %s %v", commit.Tree.ID.String(), commit.Tree.repo) return &commit.Tree, nil case "tree": - stdoutReader.Close() tree := NewTree(repo, id) tree.ResolvedID = id + tree.entries, err = catBatchParseTreeEntries(tree, rd, size) + if err != nil { + return nil, err + } + tree.entriesParsed = true return tree, nil default: - _ = stdoutReader.CloseWithError(fmt.Errorf("unknown typ: %s", typ)) return nil, ErrNotExist{ ID: id.String(), } @@ -81,12 +69,12 @@ func (repo 
*Repository) getTree(id SHA1) (*Tree, error) { // GetTree find the tree object in the repository. func (repo *Repository) GetTree(idStr string) (*Tree, error) { if len(idStr) != 40 { - res, err := NewCommand("rev-parse", "--verify", idStr).RunInDir(repo.Path) + res, err := repo.GetRefCommitID(idStr) if err != nil { return nil, err } if len(res) > 0 { - idStr = res[:len(res)-1] + idStr = res } } id, err := NewIDFromString(idStr) diff --git a/modules/git/tag.go b/modules/git/tag.go index 0323cc42edd7..23f09e25b6c8 100644 --- a/modules/git/tag.go +++ b/modules/git/tag.go @@ -35,6 +35,7 @@ func (tag *Tag) Commit() (*Commit, error) { // \n\n separate headers from message func parseTagData(data []byte) (*Tag, error) { tag := new(Tag) + tag.Tagger = &Signature{} // we now have the contents of the commit object. Let's investigate... nextline := 0 l: diff --git a/modules/git/tests/repos/repo4_commitsbetween/HEAD b/modules/git/tests/repos/repo4_commitsbetween/HEAD new file mode 100644 index 000000000000..b870d82622c1 --- /dev/null +++ b/modules/git/tests/repos/repo4_commitsbetween/HEAD @@ -0,0 +1 @@ +ref: refs/heads/main diff --git a/modules/git/tests/repos/repo4_commitsbetween/config b/modules/git/tests/repos/repo4_commitsbetween/config new file mode 100644 index 000000000000..d545cdabdbdd --- /dev/null +++ b/modules/git/tests/repos/repo4_commitsbetween/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = false + bare = false + logallrefupdates = true + symlinks = false + ignorecase = true diff --git a/modules/git/tests/repos/repo4_commitsbetween/logs/HEAD b/modules/git/tests/repos/repo4_commitsbetween/logs/HEAD new file mode 100644 index 000000000000..24cc684baef2 --- /dev/null +++ b/modules/git/tests/repos/repo4_commitsbetween/logs/HEAD @@ -0,0 +1,4 @@ +0000000000000000000000000000000000000000 fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 KN4CK3R 1624915979 +0200 commit (initial): com1 +fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 78a445db1eac62fe15e624e1137965969addf344 KN4CK3R 1624915993 +0200 commit: com2 +78a445db1eac62fe15e624e1137965969addf344 fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 KN4CK3R 1624916008 +0200 reset: moving to HEAD~1 +fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 a78e5638b66ccfe7e1b4689d3d5684e42c97d7ca KN4CK3R 1624916029 +0200 commit: com2_new diff --git a/modules/git/tests/repos/repo4_commitsbetween/logs/refs/heads/main b/modules/git/tests/repos/repo4_commitsbetween/logs/refs/heads/main new file mode 100644 index 000000000000..24cc684baef2 --- /dev/null +++ b/modules/git/tests/repos/repo4_commitsbetween/logs/refs/heads/main @@ -0,0 +1,4 @@ +0000000000000000000000000000000000000000 fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 KN4CK3R 1624915979 +0200 commit (initial): com1 +fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 78a445db1eac62fe15e624e1137965969addf344 KN4CK3R 1624915993 +0200 commit: com2 +78a445db1eac62fe15e624e1137965969addf344 fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 KN4CK3R 1624916008 +0200 reset: moving to HEAD~1 +fdc1b615bdcff0f0658b216df0c9209e5ecb7c78 a78e5638b66ccfe7e1b4689d3d5684e42c97d7ca KN4CK3R 1624916029 +0200 commit: com2_new diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/27/734c860ab19650d48e71f9f12d9bd194ed82ea b/modules/git/tests/repos/repo4_commitsbetween/objects/27/734c860ab19650d48e71f9f12d9bd194ed82ea new file mode 100644 index 000000000000..5b26f8b3a8c1 Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/27/734c860ab19650d48e71f9f12d9bd194ed82ea differ diff --git 
a/modules/git/tests/repos/repo4_commitsbetween/objects/56/a6051ca2b02b04ef92d5150c9ef600403cb1de b/modules/git/tests/repos/repo4_commitsbetween/objects/56/a6051ca2b02b04ef92d5150c9ef600403cb1de new file mode 100644 index 000000000000..b17dfe30e64f Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/56/a6051ca2b02b04ef92d5150c9ef600403cb1de differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/78/a445db1eac62fe15e624e1137965969addf344 b/modules/git/tests/repos/repo4_commitsbetween/objects/78/a445db1eac62fe15e624e1137965969addf344 new file mode 100644 index 000000000000..6d23de052ee7 Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/78/a445db1eac62fe15e624e1137965969addf344 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/a7/8e5638b66ccfe7e1b4689d3d5684e42c97d7ca b/modules/git/tests/repos/repo4_commitsbetween/objects/a7/8e5638b66ccfe7e1b4689d3d5684e42c97d7ca new file mode 100644 index 000000000000..d5c554a542dc Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/a7/8e5638b66ccfe7e1b4689d3d5684e42c97d7ca differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/ad/74ceca1b8fde10c7d933bd2e56d347dddb4ab5 b/modules/git/tests/repos/repo4_commitsbetween/objects/ad/74ceca1b8fde10c7d933bd2e56d347dddb4ab5 new file mode 100644 index 000000000000..26ed78500612 Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/ad/74ceca1b8fde10c7d933bd2e56d347dddb4ab5 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/b5/d8dd0ddd9d8d752bb47b5f781f09f478316098 b/modules/git/tests/repos/repo4_commitsbetween/objects/b5/d8dd0ddd9d8d752bb47b5f781f09f478316098 new file mode 100644 index 000000000000..8060b57df037 Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/b5/d8dd0ddd9d8d752bb47b5f781f09f478316098 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/d8/263ee9860594d2806b0dfd1bfd17528b0ba2a4 b/modules/git/tests/repos/repo4_commitsbetween/objects/d8/263ee9860594d2806b0dfd1bfd17528b0ba2a4 new file mode 100644 index 000000000000..4b1baefffb3b Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/d8/263ee9860594d2806b0dfd1bfd17528b0ba2a4 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/e2/3cc6a008501f1491b0480cedaef160e41cf684 b/modules/git/tests/repos/repo4_commitsbetween/objects/e2/3cc6a008501f1491b0480cedaef160e41cf684 new file mode 100644 index 000000000000..0a70530845aa Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/e2/3cc6a008501f1491b0480cedaef160e41cf684 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/objects/fd/c1b615bdcff0f0658b216df0c9209e5ecb7c78 b/modules/git/tests/repos/repo4_commitsbetween/objects/fd/c1b615bdcff0f0658b216df0c9209e5ecb7c78 new file mode 100644 index 000000000000..2e6d94584ca3 Binary files /dev/null and b/modules/git/tests/repos/repo4_commitsbetween/objects/fd/c1b615bdcff0f0658b216df0c9209e5ecb7c78 differ diff --git a/modules/git/tests/repos/repo4_commitsbetween/refs/heads/main b/modules/git/tests/repos/repo4_commitsbetween/refs/heads/main new file mode 100644 index 000000000000..9e1b981a6ecf --- /dev/null +++ b/modules/git/tests/repos/repo4_commitsbetween/refs/heads/main @@ -0,0 +1 @@ +a78e5638b66ccfe7e1b4689d3d5684e42c97d7ca diff --git 
a/modules/git/tree_blob_nogogit.go b/modules/git/tree_blob_nogogit.go index 6da0ccfe8e56..fdd8d79c8b63 100644 --- a/modules/git/tree_blob_nogogit.go +++ b/modules/git/tree_blob_nogogit.go @@ -15,6 +15,7 @@ import ( func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) { if len(relpath) == 0 { return &TreeEntry{ + ptree: t, ID: t.ID, name: "", fullName: "", diff --git a/modules/git/tree_entry.go b/modules/git/tree_entry.go index 498767a63eb0..3644d00f36e2 100644 --- a/modules/git/tree_entry.go +++ b/modules/git/tree_entry.go @@ -34,12 +34,19 @@ func (te *TreeEntry) FollowLink() (*TreeEntry, error) { if err != nil { return nil, err } - defer r.Close() + closed := false + defer func() { + if !closed { + _ = r.Close() + } + }() buf := make([]byte, te.Size()) _, err = io.ReadFull(r, buf) if err != nil { return nil, err } + _ = r.Close() + closed = true lnk := string(buf) t := te.ptree diff --git a/modules/git/tree_entry_nogogit.go b/modules/git/tree_entry_nogogit.go index fd60de36f528..41356ceba232 100644 --- a/modules/git/tree_entry_nogogit.go +++ b/modules/git/tree_entry_nogogit.go @@ -84,10 +84,10 @@ func (te *TreeEntry) IsExecutable() bool { // Blob returns the blob object the entry func (te *TreeEntry) Blob() *Blob { return &Blob{ - ID: te.ID, - repoPath: te.ptree.repo.Path, - name: te.Name(), - size: te.size, - gotSize: te.sized, + ID: te.ID, + name: te.Name(), + size: te.size, + gotSize: te.sized, + repo: te.ptree.repo, } } diff --git a/modules/git/tree_entry_test.go b/modules/git/tree_entry_test.go index 16cfbc4fc3ad..3382de41028d 100644 --- a/modules/git/tree_entry_test.go +++ b/modules/git/tree_entry_test.go @@ -71,33 +71,33 @@ func TestFollowLink(t *testing.T) { // should be able to dereference to target target, err := lnk.FollowLink() assert.NoError(t, err) - assert.Equal(t, target.Name(), "hello") + assert.Equal(t, "hello", target.Name()) assert.False(t, target.IsLink()) - assert.Equal(t, target.ID.String(), "b14df6442ea5a1b382985a6549b85d435376c351") + assert.Equal(t, "b14df6442ea5a1b382985a6549b85d435376c351", target.ID.String()) // should error when called on normal file target, err = commit.Tree.GetTreeEntryByPath("file1.txt") assert.NoError(t, err) _, err = target.FollowLink() - assert.Equal(t, err.Error(), "file1.txt: not a symlink") + assert.EqualError(t, err, "file1.txt: not a symlink") // should error for broken links target, err = commit.Tree.GetTreeEntryByPath("foo/broken_link") assert.NoError(t, err) assert.True(t, target.IsLink()) _, err = target.FollowLink() - assert.Equal(t, err.Error(), "broken_link: broken link") + assert.EqualError(t, err, "broken_link: broken link") // should error for external links target, err = commit.Tree.GetTreeEntryByPath("foo/outside_repo") assert.NoError(t, err) assert.True(t, target.IsLink()) _, err = target.FollowLink() - assert.Equal(t, err.Error(), "outside_repo: points outside of repo") + assert.EqualError(t, err, "outside_repo: points outside of repo") // testing fix for short link bug target, err = commit.Tree.GetTreeEntryByPath("foo/link_short") assert.NoError(t, err) _, err = target.FollowLink() - assert.Equal(t, err.Error(), "link_short: broken link") + assert.EqualError(t, err, "link_short: broken link") } diff --git a/modules/git/tree_nogogit.go b/modules/git/tree_nogogit.go index 3ebdf10631db..9661d8faea15 100644 --- a/modules/git/tree_nogogit.go +++ b/modules/git/tree_nogogit.go @@ -7,6 +7,8 @@ package git import ( + "io" + "math" "strings" ) @@ -32,6 +34,52 @@ func (t *Tree) ListEntries() (Entries, error) { 
return t.entries, nil } + if t.repo != nil { + wr, rd, cancel := t.repo.CatFileBatch() + defer cancel() + + _, _ = wr.Write([]byte(t.ID.String() + "\n")) + _, typ, sz, err := ReadBatchLine(rd) + if err != nil { + return nil, err + } + if typ == "commit" { + treeID, err := ReadTreeID(rd, sz) + if err != nil && err != io.EOF { + return nil, err + } + _, _ = wr.Write([]byte(treeID + "\n")) + _, typ, sz, err = ReadBatchLine(rd) + if err != nil { + return nil, err + } + } + if typ == "tree" { + t.entries, err = catBatchParseTreeEntries(t, rd, sz) + if err != nil { + return nil, err + } + t.entriesParsed = true + return t.entries, nil + } + + // Not a tree just use ls-tree instead + for sz > math.MaxInt32 { + discarded, err := rd.Discard(math.MaxInt32) + sz -= int64(discarded) + if err != nil { + return nil, err + } + } + for sz > 0 { + discarded, err := rd.Discard(int(sz)) + sz -= int64(discarded) + if err != nil { + return nil, err + } + } + } + stdout, err := NewCommand("ls-tree", "-l", t.ID.String()).RunInDirBytes(t.repo.Path) if err != nil { if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "fatal: not a tree object") { diff --git a/modules/git/utils.go b/modules/git/utils.go index d95218941606..13926fba72df 100644 --- a/modules/git/utils.go +++ b/modules/git/utils.go @@ -13,7 +13,7 @@ import ( "sync" ) -// ObjectCache provides thread-safe cache opeations. +// ObjectCache provides thread-safe cache operations. type ObjectCache struct { lock sync.RWMutex cache map[string]interface{} diff --git a/modules/graceful/context.go b/modules/graceful/context.go index 1ad1109b4e5b..9d955329a42b 100644 --- a/modules/graceful/context.go +++ b/modules/graceful/context.go @@ -6,17 +6,9 @@ package graceful import ( "context" - "fmt" "time" ) -// Errors for context.Err() -var ( - ErrShutdown = fmt.Errorf("Graceful Manager called Shutdown") - ErrHammer = fmt.Errorf("Graceful Manager called Hammer") - ErrTerminate = fmt.Errorf("Graceful Manager called Terminate") -) - // ChannelContext is a context that wraps a channel and error as a context type ChannelContext struct { done <-chan struct{} @@ -63,28 +55,19 @@ func (ctx *ChannelContext) Value(key interface{}) interface{} { // Callers using this context should ensure that they are registered as a running server // in order that they are waited for. func (g *Manager) ShutdownContext() context.Context { - return &ChannelContext{ - done: g.IsShutdown(), - err: ErrShutdown, - } + return g.shutdownCtx } // HammerContext returns a context.Context that is Done at hammer // Callers using this context should ensure that they are registered as a running server // in order that they are waited for. func (g *Manager) HammerContext() context.Context { - return &ChannelContext{ - done: g.IsHammer(), - err: ErrHammer, - } + return g.hammerCtx } // TerminateContext returns a context.Context that is Done at terminate // Callers using this context should ensure that they are registered as a terminating server // in order that they are waited for. 
func (g *Manager) TerminateContext() context.Context { - return &ChannelContext{ - done: g.IsTerminate(), - err: ErrTerminate, - } + return g.terminateCtx } diff --git a/modules/graceful/manager.go b/modules/graceful/manager.go index 903d05ed21f4..8c3b95c4aa74 100644 --- a/modules/graceful/manager.go +++ b/modules/graceful/manager.go @@ -54,8 +54,8 @@ func InitManager(ctx context.Context) { }) } -// CallbackWithContext is combined runnable and context to watch to see if the caller has finished -type CallbackWithContext func(ctx context.Context, callback func()) +// WithCallback is a runnable to call when the caller has finished +type WithCallback func(callback func()) // RunnableWithShutdownFns is a runnable with functions to run at shutdown and terminate // After the callback to atShutdown is called and is complete, the main function must return. @@ -63,7 +63,7 @@ type CallbackWithContext func(ctx context.Context, callback func()) // Please note that use of the atShutdown and atTerminate callbacks will create go-routines that will wait till their respective signals // - users must therefore be careful to only call these as necessary. // If run is not expected to run indefinitely RunWithShutdownChan is likely to be more appropriate. -type RunnableWithShutdownFns func(atShutdown, atTerminate func(context.Context, func())) +type RunnableWithShutdownFns func(atShutdown, atTerminate func(func())) // RunWithShutdownFns takes a function that has both atShutdown and atTerminate callbacks // After the callback to atShutdown is called and is complete, the main function must return. @@ -80,17 +80,21 @@ func (g *Manager) RunWithShutdownFns(run RunnableWithShutdownFns) { g.doShutdown() } }() - run(func(ctx context.Context, atShutdown func()) { - go func() { - select { - case <-g.IsShutdown(): + run(func(atShutdown func()) { + g.lock.Lock() + defer g.lock.Unlock() + g.toRunAtShutdown = append(g.toRunAtShutdown, + func() { + defer func() { + if err := recover(); err != nil { + log.Critical("PANIC during RunWithShutdownFns: %v\nStacktrace: %s", err, log.Stack(2)) + g.doShutdown() + } + }() atShutdown() - case <-ctx.Done(): - return - } - }() - }, func(ctx context.Context, atTerminate func()) { - g.RunAtTerminate(ctx, atTerminate) + }) + }, func(atTerminate func()) { + g.RunAtTerminate(atTerminate) }) } @@ -99,7 +103,7 @@ func (g *Manager) RunWithShutdownFns(run RunnableWithShutdownFns) { // (Optionally IsHammer may be waited for instead however, this should be avoided if possible.) // The callback function provided to atTerminate must return once termination is complete. // Please note that use of the atTerminate function will create a go-routine that will wait till terminate - users must therefore be careful to only call this as necessary. -type RunnableWithShutdownChan func(atShutdown <-chan struct{}, atTerminate CallbackWithContext) +type RunnableWithShutdownChan func(atShutdown <-chan struct{}, atTerminate WithCallback) // RunWithShutdownChan takes a function that has channel to watch for shutdown and atTerminate callbacks // After the atShutdown channel is closed, the main function must return once shutdown is complete. 
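
The graceful hunks above drop the context.Context parameter from callback registration: RunAtTerminate takes a bare func(), and the atShutdown/atTerminate hooks passed to a RunnableWithShutdownFns are plain func(func()). A small sketch of the new registration style, assuming only the exported API shown in this patch; closeResource is a hypothetical stand-in for whatever needs releasing:

package main

import (
	"context"
	"fmt"
	"os"

	"code.gitea.io/gitea/modules/graceful"
)

// closeResource is a hypothetical stand-in for an indexer, queue or listener
// that has to be released when Gitea terminates.
func closeResource() {
	fmt.Fprintln(os.Stderr, "resource closed")
}

func main() {
	graceful.InitManager(context.Background())
	manager := graceful.GetManager()

	// Old form: manager.RunAtTerminate(ctx, closeResource)
	// New form: no context; the callback is queued on the manager and run once
	// its terminate context is cancelled.
	manager.RunAtTerminate(closeResource)

	// The hooks handed to a RunnableWithShutdownFns are now plain func(func()).
	manager.RunWithShutdownFns(func(atShutdown, atTerminate func(func())) {
		atShutdown(func() { fmt.Fprintln(os.Stderr, "shutdown requested") })
		atTerminate(closeResource)
		// The runnable's main work would run here and return after shutdown.
	})
}
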
@@ -115,8 +119,8 @@ func (g *Manager) RunWithShutdownChan(run RunnableWithShutdownChan) { g.doShutdown() } }() - run(g.IsShutdown(), func(ctx context.Context, atTerminate func()) { - g.RunAtTerminate(ctx, atTerminate) + run(g.IsShutdown(), func(atTerminate func()) { + g.RunAtTerminate(atTerminate) }) } @@ -136,60 +140,65 @@ func (g *Manager) RunWithShutdownContext(run func(context.Context)) { } // RunAtTerminate adds to the terminate wait group and creates a go-routine to run the provided function at termination -func (g *Manager) RunAtTerminate(ctx context.Context, terminate func()) { +func (g *Manager) RunAtTerminate(terminate func()) { g.terminateWaitGroup.Add(1) - go func() { - defer g.terminateWaitGroup.Done() - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunAtTerminate: %v\nStacktrace: %s", err, log.Stack(2)) - } - }() - select { - case <-g.IsTerminate(): + g.lock.Lock() + defer g.lock.Unlock() + g.toRunAtTerminate = append(g.toRunAtTerminate, + func() { + defer g.terminateWaitGroup.Done() + defer func() { + if err := recover(); err != nil { + log.Critical("PANIC during RunAtTerminate: %v\nStacktrace: %s", err, log.Stack(2)) + } + }() terminate() - case <-ctx.Done(): - } - }() + }) } // RunAtShutdown creates a go-routine to run the provided function at shutdown func (g *Manager) RunAtShutdown(ctx context.Context, shutdown func()) { - go func() { - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunAtShutdown: %v\nStacktrace: %s", err, log.Stack(2)) + g.lock.Lock() + defer g.lock.Unlock() + g.toRunAtShutdown = append(g.toRunAtShutdown, + func() { + defer func() { + if err := recover(); err != nil { + log.Critical("PANIC during RunAtShutdown: %v\nStacktrace: %s", err, log.Stack(2)) + } + }() + select { + case <-ctx.Done(): + return + default: + shutdown() } - }() - select { - case <-g.IsShutdown(): - shutdown() - case <-ctx.Done(): - } - }() + }) } // RunAtHammer creates a go-routine to run the provided function at shutdown -func (g *Manager) RunAtHammer(ctx context.Context, hammer func()) { - go func() { - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunAtHammer: %v\nStacktrace: %s", err, log.Stack(2)) - } - }() - select { - case <-g.IsHammer(): +func (g *Manager) RunAtHammer(hammer func()) { + g.lock.Lock() + defer g.lock.Unlock() + g.toRunAtHammer = append(g.toRunAtHammer, + func() { + defer func() { + if err := recover(); err != nil { + log.Critical("PANIC during RunAtHammer: %v\nStacktrace: %s", err, log.Stack(2)) + } + }() hammer() - case <-ctx.Done(): - } - }() + }) } func (g *Manager) doShutdown() { if !g.setStateTransition(stateRunning, stateShuttingDown) { return } g.lock.Lock() - close(g.shutdown) + g.shutdownCtxCancel() + for _, fn := range g.toRunAtShutdown { + go fn() + } g.lock.Unlock() if setting.GracefulHammerTime >= 0 { @@ -203,7 +212,7 @@ func (g *Manager) doShutdown() { g.doTerminate() g.WaitForTerminate() g.lock.Lock() - close(g.done) + g.doneCtxCancel() g.lock.Unlock() }() } @@ -212,10 +221,13 @@ func (g *Manager) doHammerTime(d time.Duration) { time.Sleep(d) g.lock.Lock() select { - case <-g.hammer: + case <-g.hammerCtx.Done(): default: log.Warn("Setting Hammer condition") - close(g.hammer) + g.hammerCtxCancel() + for _, fn := range g.toRunAtHammer { + go fn() + } } g.lock.Unlock() } @@ -226,10 +238,13 @@ func (g *Manager) doTerminate() { } g.lock.Lock() select { - case <-g.terminate: + case <-g.terminateCtx.Done(): default: log.Warn("Terminating") - 
close(g.terminate) + g.terminateCtxCancel() + for _, fn := range g.toRunAtTerminate { + go fn() + } } g.lock.Unlock() } @@ -242,7 +257,7 @@ func (g *Manager) IsChild() bool { // IsShutdown returns a channel which will be closed at shutdown. // The order of closure is IsShutdown, IsHammer (potentially), IsTerminate func (g *Manager) IsShutdown() <-chan struct{} { - return g.shutdown + return g.shutdownCtx.Done() } // IsHammer returns a channel which will be closed at hammer @@ -250,14 +265,14 @@ func (g *Manager) IsShutdown() <-chan struct{} { // Servers running within the running server wait group should respond to IsHammer // if not shutdown already func (g *Manager) IsHammer() <-chan struct{} { - return g.hammer + return g.hammerCtx.Done() } // IsTerminate returns a channel which will be closed at terminate // The order of closure is IsShutdown, IsHammer (potentially), IsTerminate // IsTerminate will only close once all running servers have stopped func (g *Manager) IsTerminate() <-chan struct{} { - return g.terminate + return g.terminateCtx.Done() } // ServerDone declares a running server done and subtracts one from the @@ -314,25 +329,20 @@ func (g *Manager) InformCleanup() { // Done allows the manager to be viewed as a context.Context, it returns a channel that is closed when the server is finished terminating func (g *Manager) Done() <-chan struct{} { - return g.done + return g.doneCtx.Done() } -// Err allows the manager to be viewed as a context.Context done at Terminate, it returns ErrTerminate +// Err allows the manager to be viewed as a context.Context done at Terminate func (g *Manager) Err() error { - select { - case <-g.Done(): - return ErrTerminate - default: - return nil - } + return g.doneCtx.Err() } -// Value allows the manager to be viewed as a context.Context done at Terminate, it has no values +// Value allows the manager to be viewed as a context.Context done at Terminate func (g *Manager) Value(key interface{}) interface{} { - return nil + return g.doneCtx.Value(key) } // Deadline returns nil as there is no fixed Deadline for the manager, it allows the manager to be viewed as a context.Context func (g *Manager) Deadline() (deadline time.Time, ok bool) { - return + return g.doneCtx.Deadline() } diff --git a/modules/graceful/manager_unix.go b/modules/graceful/manager_unix.go index 540974454c34..20d9b3905c4f 100644 --- a/modules/graceful/manager_unix.go +++ b/modules/graceful/manager_unix.go @@ -25,13 +25,21 @@ type Manager struct { forked bool lock *sync.RWMutex state state - shutdown chan struct{} - hammer chan struct{} - terminate chan struct{} - done chan struct{} + shutdownCtx context.Context + hammerCtx context.Context + terminateCtx context.Context + doneCtx context.Context + shutdownCtxCancel context.CancelFunc + hammerCtxCancel context.CancelFunc + terminateCtxCancel context.CancelFunc + doneCtxCancel context.CancelFunc runningServerWaitGroup sync.WaitGroup createServerWaitGroup sync.WaitGroup terminateWaitGroup sync.WaitGroup + + toRunAtShutdown []func() + toRunAtHammer []func() + toRunAtTerminate []func() } func newGracefulManager(ctx context.Context) *Manager { @@ -45,11 +53,11 @@ func newGracefulManager(ctx context.Context) *Manager { } func (g *Manager) start(ctx context.Context) { - // Make channels - g.terminate = make(chan struct{}) - g.shutdown = make(chan struct{}) - g.hammer = make(chan struct{}) - g.done = make(chan struct{}) + // Make contexts + g.terminateCtx, g.terminateCtxCancel = context.WithCancel(ctx) + g.shutdownCtx, g.shutdownCtxCancel = 
context.WithCancel(ctx) + g.hammerCtx, g.hammerCtxCancel = context.WithCancel(ctx) + g.doneCtx, g.doneCtxCancel = context.WithCancel(ctx) // Set the running state & handle signals g.setState(stateRunning) diff --git a/modules/graceful/manager_windows.go b/modules/graceful/manager_windows.go index b0e0d1ce38e3..51f29778ba7a 100644 --- a/modules/graceful/manager_windows.go +++ b/modules/graceful/manager_windows.go @@ -36,14 +36,22 @@ type Manager struct { isChild bool lock *sync.RWMutex state state - shutdown chan struct{} - hammer chan struct{} - terminate chan struct{} - done chan struct{} + shutdownCtx context.Context + hammerCtx context.Context + terminateCtx context.Context + doneCtx context.Context + shutdownCtxCancel context.CancelFunc + hammerCtxCancel context.CancelFunc + terminateCtxCancel context.CancelFunc + doneCtxCancel context.CancelFunc runningServerWaitGroup sync.WaitGroup createServerWaitGroup sync.WaitGroup terminateWaitGroup sync.WaitGroup shutdownRequested chan struct{} + + toRunAtShutdown []func() + toRunAtHammer []func() + toRunAtTerminate []func() } func newGracefulManager(ctx context.Context) *Manager { @@ -58,11 +66,13 @@ func newGracefulManager(ctx context.Context) *Manager { } func (g *Manager) start() { + // Make contexts + g.terminateCtx, g.terminateCtxCancel = context.WithCancel(g.ctx) + g.shutdownCtx, g.shutdownCtxCancel = context.WithCancel(g.ctx) + g.hammerCtx, g.hammerCtxCancel = context.WithCancel(g.ctx) + g.doneCtx, g.doneCtxCancel = context.WithCancel(g.ctx) + // Make channels - g.terminate = make(chan struct{}) - g.shutdown = make(chan struct{}) - g.hammer = make(chan struct{}) - g.done = make(chan struct{}) g.shutdownRequested = make(chan struct{}) // Set the running state @@ -74,12 +84,14 @@ func (g *Manager) start() { // Make SVC process run := svc.Run - isWindowsService, err := svc.IsWindowsService() + + //lint:ignore SA1019 We use IsAnInteractiveSession because IsWindowsService has a different permissions profile + isAnInteractiveSession, err := svc.IsAnInteractiveSession() if err != nil { log.Error("Unable to ascertain if running as an Windows Service: %v", err) return } - if !isWindowsService { + if isAnInteractiveSession { log.Trace("Not running a service ... 
using the debug SVC manager") run = debug.Run } @@ -169,7 +181,7 @@ hammerLoop: default: log.Debug("Unexpected control request: %v", change.Cmd) } - case <-g.hammer: + case <-g.hammerCtx.Done(): break hammerLoop } } diff --git a/modules/graceful/server.go b/modules/graceful/server.go index c5021a9ba5d5..6b7d4a1a970c 100644 --- a/modules/graceful/server.go +++ b/modules/graceful/server.go @@ -17,6 +17,7 @@ import ( "time" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" ) var ( @@ -26,6 +27,10 @@ var ( DefaultWriteTimeOut time.Duration // DefaultMaxHeaderBytes default max header bytes DefaultMaxHeaderBytes int + // PerWriteWriteTimeout timeout for writes + PerWriteWriteTimeout = 30 * time.Second + // PerWriteWriteTimeoutKbTime is a timeout taking account of how much there is to be written + PerWriteWriteTimeoutKbTime = 10 * time.Second ) func init() { @@ -37,14 +42,16 @@ type ServeFunction = func(net.Listener) error // Server represents our graceful server type Server struct { - network string - address string - listener net.Listener - wg sync.WaitGroup - state state - lock *sync.RWMutex - BeforeBegin func(network, address string) - OnShutdown func() + network string + address string + listener net.Listener + wg sync.WaitGroup + state state + lock *sync.RWMutex + BeforeBegin func(network, address string) + OnShutdown func() + PerWriteTimeout time.Duration + PerWritePerKbTimeout time.Duration } // NewServer creates a server on network at provided address @@ -55,11 +62,13 @@ func NewServer(network, address, name string) *Server { log.Info("Starting new %s server: %s:%s on PID: %d", name, network, address, os.Getpid()) } srv := &Server{ - wg: sync.WaitGroup{}, - state: stateInit, - lock: &sync.RWMutex{}, - network: network, - address: address, + wg: sync.WaitGroup{}, + state: stateInit, + lock: &sync.RWMutex{}, + network: network, + address: address, + PerWriteTimeout: setting.PerWriteTimeout, + PerWritePerKbTimeout: setting.PerWritePerKbTimeout, } srv.BeforeBegin = func(network, addr string) { @@ -97,7 +106,7 @@ func (srv *Server) ListenAndServe(serve ServeFunction) error { func (srv *Server) ListenAndServeTLS(certFile, keyFile string, serve ServeFunction) error { config := &tls.Config{} if config.NextProtos == nil { - config.NextProtos = []string{"http/1.1"} + config.NextProtos = []string{"h2", "http/1.1"} } config.Certificates = make([]tls.Certificate, 1) @@ -221,9 +230,11 @@ func (wl *wrappedListener) Accept() (net.Conn, error) { closed := int32(0) c = wrappedConn{ - Conn: c, - server: wl.server, - closed: &closed, + Conn: c, + server: wl.server, + closed: &closed, + perWriteTimeout: wl.server.PerWriteTimeout, + perWritePerKbTimeout: wl.server.PerWritePerKbTimeout, } wl.server.wg.Add(1) @@ -246,8 +257,25 @@ func (wl *wrappedListener) File() (*os.File, error) { type wrappedConn struct { net.Conn - server *Server - closed *int32 + server *Server + closed *int32 + deadline time.Time + perWriteTimeout time.Duration + perWritePerKbTimeout time.Duration +} + +func (w wrappedConn) Write(p []byte) (n int, err error) { + if w.perWriteTimeout > 0 { + minTimeout := time.Duration(len(p)/1024) * w.perWritePerKbTimeout + minDeadline := time.Now().Add(minTimeout).Add(w.perWriteTimeout) + + w.deadline = w.deadline.Add(minTimeout) + if minDeadline.After(w.deadline) { + w.deadline = minDeadline + } + _ = w.Conn.SetWriteDeadline(w.deadline) + } + return w.Conn.Write(p) } func (w wrappedConn) Close() error { diff --git a/modules/graceful/server_http.go 
b/modules/graceful/server_http.go index b101a10d9197..4471e379ef44 100644 --- a/modules/graceful/server_http.go +++ b/modules/graceful/server_http.go @@ -5,7 +5,9 @@ package graceful import ( + "context" "crypto/tls" + "net" "net/http" ) @@ -16,6 +18,7 @@ func newHTTPServer(network, address, name string, handler http.Handler) (*Server WriteTimeout: DefaultWriteTimeOut, MaxHeaderBytes: DefaultMaxHeaderBytes, Handler: handler, + BaseContext: func(net.Listener) context.Context { return GetManager().HammerContext() }, } server.OnShutdown = func() { httpServer.SetKeepAlivesEnabled(false) diff --git a/modules/highlight/highlight.go b/modules/highlight/highlight.go index 914ba8210ebf..568035fbb7fb 100644 --- a/modules/highlight/highlight.go +++ b/modules/highlight/highlight.go @@ -8,6 +8,7 @@ package highlight import ( "bufio" "bytes" + "fmt" gohtml "html" "path/filepath" "strings" @@ -16,9 +17,11 @@ import ( "code.gitea.io/gitea/modules/analyze" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "github.com/alecthomas/chroma" "github.com/alecthomas/chroma/formatters/html" "github.com/alecthomas/chroma/lexers" "github.com/alecthomas/chroma/styles" + lru "github.com/hashicorp/golang-lru" ) // don't index files larger than this many bytes for performance purposes @@ -29,6 +32,8 @@ var ( highlightMapping = map[string]string{} once sync.Once + + cache *lru.TwoQueueCache ) // NewContext loads custom highlight map from local config @@ -38,6 +43,13 @@ func NewContext() { for i := range keys { highlightMapping[keys[i].Name()] = keys[i].Value() } + + // The size 512 is simply a conservative rule of thumb + c, err := lru.New2Q(512) + if err != nil { + panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err)) + } + cache = c }) } @@ -66,14 +78,24 @@ func Code(fileName, code string) string { htmlbuf := bytes.Buffer{} htmlw := bufio.NewWriter(&htmlbuf) + var lexer chroma.Lexer if val, ok := highlightMapping[filepath.Ext(fileName)]; ok { - //change file name to one with mapped extension so we look that up instead - fileName = "mapped." + val + //use mapped value to find lexer + lexer = lexers.Get(val) } - lexer := lexers.Match(fileName) if lexer == nil { - lexer = lexers.Fallback + if l, ok := cache.Get(fileName); ok { + lexer = l.(chroma.Lexer) + } + } + + if lexer == nil { + lexer = lexers.Match(fileName) + if lexer == nil { + lexer = lexers.Fallback + } + cache.Add(fileName, lexer) } iterator, err := lexer.Tokenise(nil, string(code)) @@ -114,17 +136,20 @@ func File(numLines int, fileName string, code []byte) map[int]string { htmlbuf := bytes.Buffer{} htmlw := bufio.NewWriter(&htmlbuf) + var lexer chroma.Lexer if val, ok := highlightMapping[filepath.Ext(fileName)]; ok { - fileName = "test." 
+ val + lexer = lexers.Get(val) } - language := analyze.GetCodeLanguage(fileName, code) - - lexer := lexers.Get(language) if lexer == nil { - lexer = lexers.Match(fileName) + language := analyze.GetCodeLanguage(fileName, code) + + lexer = lexers.Get(language) if lexer == nil { - lexer = lexers.Fallback + lexer = lexers.Match(fileName) + if lexer == nil { + lexer = lexers.Fallback + } } } diff --git a/modules/httplib/httplib.go b/modules/httplib/httplib.go index 294ad0b70b67..5c8eac8b4283 100644 --- a/modules/httplib/httplib.go +++ b/modules/httplib/httplib.go @@ -7,6 +7,7 @@ package httplib import ( "bytes" + "context" "crypto/tls" "encoding/xml" "io" @@ -122,6 +123,12 @@ func (r *Request) Setting(setting Settings) *Request { return r } +// SetContext sets the request's Context +func (r *Request) SetContext(ctx context.Context) *Request { + r.req = r.req.WithContext(ctx) + return r +} + // SetBasicAuth sets the request's Authorization header to use HTTP Basic Authentication with the provided username and password. func (r *Request) SetBasicAuth(username, password string) *Request { r.req.SetBasicAuth(username, password) @@ -325,7 +332,7 @@ func (r *Request) getResponse() (*http.Response, error) { trans = &http.Transport{ TLSClientConfig: r.setting.TLSClientConfig, Proxy: proxy, - Dial: TimeoutDialer(r.setting.ConnectTimeout), + DialContext: TimeoutDialer(r.setting.ConnectTimeout), } } else if t, ok := trans.(*http.Transport); ok { if t.TLSClientConfig == nil { @@ -334,8 +341,8 @@ func (r *Request) getResponse() (*http.Response, error) { if t.Proxy == nil { t.Proxy = r.setting.Proxy } - if t.Dial == nil { - t.Dial = TimeoutDialer(r.setting.ConnectTimeout) + if t.DialContext == nil { + t.DialContext = TimeoutDialer(r.setting.ConnectTimeout) } } @@ -458,9 +465,10 @@ func (r *Request) Response() (*http.Response, error) { } // TimeoutDialer returns functions of connection dialer with timeout settings for http.Transport Dial field. 
-func TimeoutDialer(cTimeout time.Duration) func(net, addr string) (c net.Conn, err error) { - return func(netw, addr string) (net.Conn, error) { - conn, err := net.DialTimeout(netw, addr, cTimeout) +func TimeoutDialer(cTimeout time.Duration) func(ctx context.Context, net, addr string) (c net.Conn, err error) { + return func(ctx context.Context, netw, addr string) (net.Conn, error) { + d := net.Dialer{Timeout: cTimeout} + conn, err := d.DialContext(ctx, netw, addr) if err != nil { return nil, err } diff --git a/modules/indexer/code/bleve.go b/modules/indexer/code/bleve.go index 416adeea74f2..600789a28409 100644 --- a/modules/indexer/code/bleve.go +++ b/modules/indexer/code/bleve.go @@ -16,12 +16,12 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/analyze" - "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" "github.com/blevesearch/bleve/v2" @@ -176,7 +176,7 @@ func NewBleveIndexer(indexDir string) (*BleveIndexer, bool, error) { return indexer, created, err } -func (b *BleveIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio.Reader, commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error { +func (b *BleveIndexer) addUpdate(batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error { // Ignore vendored files in code search if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) { return nil @@ -211,11 +211,14 @@ func (b *BleveIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio. 
fileContents, err := ioutil.ReadAll(io.LimitReader(batchReader, size)) if err != nil { return err - } else if !base.IsTextFile(fileContents) { + } else if !typesniffer.DetectContentType(fileContents).IsText() { // FIXME: UTF-16 files will probably fail here return nil } + if _, err = batchReader.Discard(1); err != nil { + return err + } id := filenameIndexerID(repo.ID, update.Filename) return batch.Index(id, &RepoIndexerData{ RepoID: repo.ID, diff --git a/modules/indexer/code/elastic_search.go b/modules/indexer/code/elastic_search.go index ebb7910fdcb4..569917f151f4 100644 --- a/modules/indexer/code/elastic_search.go +++ b/modules/indexer/code/elastic_search.go @@ -16,12 +16,12 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/analyze" - "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/typesniffer" "github.com/go-enry/go-enry/v2" jsoniter "github.com/json-iterator/go" @@ -175,7 +175,7 @@ func (b *ElasticSearchIndexer) init() (bool, error) { return exists, nil } -func (b *ElasticSearchIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader *bufio.Reader, sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) { +func (b *ElasticSearchIndexer) addUpdate(batchWriter git.WriteCloserError, batchReader *bufio.Reader, sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) { // Ignore vendored files in code search if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) { return nil, nil @@ -210,11 +210,14 @@ func (b *ElasticSearchIndexer) addUpdate(batchWriter *io.PipeWriter, batchReader fileContents, err := ioutil.ReadAll(io.LimitReader(batchReader, size)) if err != nil { return nil, err - } else if !base.IsTextFile(fileContents) { + } else if !typesniffer.DetectContentType(fileContents).IsText() { // FIXME: UTF-16 files will probably fail here return nil, nil } + if _, err = batchReader.Discard(1); err != nil { + return nil, err + } id := filenameIndexerID(repo.ID, update.Filename) return []elastic.BulkableRequest{ @@ -281,7 +284,7 @@ func (b *ElasticSearchIndexer) Delete(repoID int64) error { } // indexPos find words positions for start and the following end on content. It will -// return the beginning position of the frist start and the ending position of the +// return the beginning position of the first start and the ending position of the // first end following the start string. // If not found any of the positions, it will return -1, -1. func indexPos(content, start, end string) (int, int) { @@ -305,8 +308,8 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int) var startIndex, endIndex int = -1, -1 c, ok := hit.Highlight["content"] if ok && len(c) > 0 { - // FIXME: Since the high lighting content will include and for the keywords, - // now we should find the poisitions. But how to avoid html content which contains the + // FIXME: Since the highlighting content will include and for the keywords, + // now we should find the positions. But how to avoid html content which contains the // and tags? If elastic search has handled that? 
startIndex, endIndex = indexPos(c[0], "", "") if startIndex == -1 { diff --git a/modules/indexer/code/indexer.go b/modules/indexer/code/indexer.go index a7d78e9fdc82..67fa43eda89d 100644 --- a/modules/indexer/code/indexer.go +++ b/modules/indexer/code/indexer.go @@ -115,7 +115,13 @@ func Init() { ctx, cancel := context.WithCancel(context.Background()) - graceful.GetManager().RunAtTerminate(ctx, func() { + graceful.GetManager().RunAtTerminate(func() { + select { + case <-ctx.Done(): + return + default: + } + cancel() log.Debug("Closing repository indexer") indexer.Close() log.Info("PID: %d Repository Indexer closed", os.Getpid()) diff --git a/modules/indexer/code/indexer_test.go b/modules/indexer/code/indexer_test.go index 8fcb7a0e8a67..01717bd288a7 100644 --- a/modules/indexer/code/indexer_test.go +++ b/modules/indexer/code/indexer_test.go @@ -67,7 +67,7 @@ func testIndexer(name string, t *testing.T, indexer Indexer) { total, res, langs, err := indexer.Search(kw.RepoIDs, "", kw.Keyword, 1, 10, false) assert.NoError(t, err) assert.EqualValues(t, len(kw.IDs), total) - assert.EqualValues(t, kw.Langs, len(langs)) + assert.Len(t, langs, kw.Langs) var ids = make([]int64, 0, len(res)) for _, hit := range res { diff --git a/modules/indexer/issues/indexer.go b/modules/indexer/issues/indexer.go index 9edaef6bdd01..676b6686ea5b 100644 --- a/modules/indexer/issues/indexer.go +++ b/modules/indexer/issues/indexer.go @@ -160,7 +160,7 @@ func InitIssueIndexer(syncReindex bool) { } populate = !exist holder.set(issueIndexer) - graceful.GetManager().RunAtTerminate(context.Background(), func() { + graceful.GetManager().RunAtTerminate(func() { log.Debug("Closing issue indexer") issueIndexer := holder.get() if issueIndexer != nil { @@ -170,7 +170,7 @@ func InitIssueIndexer(syncReindex bool) { }) log.Debug("Created Bleve Indexer") case "elasticsearch": - graceful.GetManager().RunWithShutdownFns(func(_, atTerminate func(context.Context, func())) { + graceful.GetManager().RunWithShutdownFns(func(_, atTerminate func(func())) { issueIndexer, err := NewElasticSearchIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName) if err != nil { log.Fatal("Unable to initialize Elastic Search Issue Indexer at connection: %s Error: %v", setting.Indexer.IssueConnStr, err) diff --git a/modules/lfs/client.go b/modules/lfs/client.go index ae35919d770b..0a21440f73d0 100644 --- a/modules/lfs/client.go +++ b/modules/lfs/client.go @@ -10,9 +10,17 @@ import ( "net/url" ) +// DownloadCallback gets called for every requested LFS object to process its content +type DownloadCallback func(p Pointer, content io.ReadCloser, objectError error) error + +// UploadCallback gets called for every requested LFS object to provide its content +type UploadCallback func(p Pointer, objectError error) (io.ReadCloser, error) + // Client is used to communicate with a LFS source type Client interface { - Download(ctx context.Context, oid string, size int64) (io.ReadCloser, error) + BatchSize() int + Download(ctx context.Context, objects []Pointer, callback DownloadCallback) error + Upload(ctx context.Context, objects []Pointer, callback UploadCallback) error } // NewClient creates a LFS client diff --git a/modules/lfs/client_test.go b/modules/lfs/client_test.go index d4eb00546948..1040b3992560 100644 --- a/modules/lfs/client_test.go +++ b/modules/lfs/client_test.go @@ -6,7 +6,6 @@ package lfs import ( "net/url" - "testing" "github.com/stretchr/testify/assert" diff --git a/modules/lfs/filesystem_client.go b/modules/lfs/filesystem_client.go index 
3a51564a821b..dc72981a9ec9 100644 --- a/modules/lfs/filesystem_client.go +++ b/modules/lfs/filesystem_client.go @@ -19,6 +19,11 @@ type FilesystemClient struct { lfsdir string } +// BatchSize returns the preferred size of batchs to process +func (c *FilesystemClient) BatchSize() int { + return 1 +} + func newFilesystemClient(endpoint *url.URL) *FilesystemClient { path, _ := util.FileURLToPath(endpoint) @@ -33,18 +38,56 @@ func (c *FilesystemClient) objectPath(oid string) string { return filepath.Join(c.lfsdir, oid[0:2], oid[2:4], oid) } -// Download reads the specific LFS object from the target repository -func (c *FilesystemClient) Download(ctx context.Context, oid string, size int64) (io.ReadCloser, error) { - objectPath := c.objectPath(oid) +// Download reads the specific LFS object from the target path +func (c *FilesystemClient) Download(ctx context.Context, objects []Pointer, callback DownloadCallback) error { + for _, object := range objects { + p := Pointer{object.Oid, object.Size} - if _, err := os.Stat(objectPath); os.IsNotExist(err) { - return nil, err - } + objectPath := c.objectPath(p.Oid) + + f, err := os.Open(objectPath) + if err != nil { + return err + } - file, err := os.Open(objectPath) - if err != nil { - return nil, err + if err := callback(p, f, nil); err != nil { + return err + } } + return nil +} + +// Upload writes the specific LFS object to the target path +func (c *FilesystemClient) Upload(ctx context.Context, objects []Pointer, callback UploadCallback) error { + for _, object := range objects { + p := Pointer{object.Oid, object.Size} + + objectPath := c.objectPath(p.Oid) - return file, nil + if err := os.MkdirAll(filepath.Dir(objectPath), os.ModePerm); err != nil { + return err + } + + content, err := callback(p, nil) + if err != nil { + return err + } + + err = func() error { + defer content.Close() + + f, err := os.Create(objectPath) + if err != nil { + return err + } + + _, err = io.Copy(f, content) + + return err + }() + if err != nil { + return err + } + } + return nil } diff --git a/modules/lfs/http_client.go b/modules/lfs/http_client.go index fb45defda1cf..e799b80831ea 100644 --- a/modules/lfs/http_client.go +++ b/modules/lfs/http_client.go @@ -7,17 +7,19 @@ package lfs import ( "bytes" "context" - "encoding/json" "errors" "fmt" - "io" "net/http" "net/url" "strings" "code.gitea.io/gitea/modules/log" + + jsoniter "github.com/json-iterator/go" ) +const batchSize = 20 + // HTTPClient is used to communicate with the LFS server // https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md type HTTPClient struct { @@ -26,6 +28,11 @@ type HTTPClient struct { transfers map[string]TransferAdapter } +// BatchSize returns the preferred size of batchs to process +func (c *HTTPClient) BatchSize() int { + return batchSize +} + func newHTTPClient(endpoint *url.URL) *HTTPClient { hc := &http.Client{} @@ -55,21 +62,25 @@ func (c *HTTPClient) transferNames() []string { } func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Pointer) (*BatchResponse, error) { + log.Trace("BATCH operation with objects: %v", objects) + url := fmt.Sprintf("%s/objects/batch", c.endpoint) request := &BatchRequest{operation, c.transferNames(), nil, objects} payload := new(bytes.Buffer) - err := json.NewEncoder(payload).Encode(request) + err := jsoniter.NewEncoder(payload).Encode(request) if err != nil { - return nil, fmt.Errorf("lfs.HTTPClient.batch json.Encode: %w", err) + log.Error("Error encoding json: %v", err) + return nil, err } - log.Trace("lfs.HTTPClient.batch 
NewRequestWithContext: %s", url) + log.Trace("Calling: %s", url) req, err := http.NewRequestWithContext(ctx, "POST", url, payload) if err != nil { - return nil, fmt.Errorf("lfs.HTTPClient.batch http.NewRequestWithContext: %w", err) + log.Error("Error creating request: %v", err) + return nil, err } req.Header.Set("Content-type", MediaType) req.Header.Set("Accept", MediaType) @@ -81,18 +92,20 @@ func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Poin return nil, ctx.Err() default: } - return nil, fmt.Errorf("lfs.HTTPClient.batch http.Do: %w", err) + log.Error("Error while processing request: %v", err) + return nil, err } defer res.Body.Close() if res.StatusCode != http.StatusOK { - return nil, fmt.Errorf("lfs.HTTPClient.batch: Unexpected servers response: %s", res.Status) + return nil, fmt.Errorf("Unexpected server response: %s", res.Status) } var response BatchResponse - err = json.NewDecoder(res.Body).Decode(&response) + err = jsoniter.NewDecoder(res.Body).Decode(&response) if err != nil { - return nil, fmt.Errorf("lfs.HTTPClient.batch json.Decode: %w", err) + log.Error("Error decoding json: %v", err) + return nil, err } if len(response.Transfer) == 0 { @@ -103,27 +116,99 @@ func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Poin } // Download reads the specific LFS object from the LFS server -func (c *HTTPClient) Download(ctx context.Context, oid string, size int64) (io.ReadCloser, error) { - var objects []Pointer - objects = append(objects, Pointer{oid, size}) +func (c *HTTPClient) Download(ctx context.Context, objects []Pointer, callback DownloadCallback) error { + return c.performOperation(ctx, objects, callback, nil) +} + +// Upload sends the specific LFS object to the LFS server +func (c *HTTPClient) Upload(ctx context.Context, objects []Pointer, callback UploadCallback) error { + return c.performOperation(ctx, objects, nil, callback) +} - result, err := c.batch(ctx, "download", objects) +func (c *HTTPClient) performOperation(ctx context.Context, objects []Pointer, dc DownloadCallback, uc UploadCallback) error { + if len(objects) == 0 { + return nil + } + + operation := "download" + if uc != nil { + operation = "upload" + } + + result, err := c.batch(ctx, operation, objects) if err != nil { - return nil, err + return err } transferAdapter, ok := c.transfers[result.Transfer] if !ok { - return nil, fmt.Errorf("lfs.HTTPClient.Download Transferadapter not found: %s", result.Transfer) + return fmt.Errorf("TransferAdapter not found: %s", result.Transfer) } - if len(result.Objects) == 0 { - return nil, errors.New("lfs.HTTPClient.Download: No objects in result") - } + for _, object := range result.Objects { + if object.Error != nil { + objectError := errors.New(object.Error.Message) + log.Trace("Error on object %v: %v", object.Pointer, objectError) + if uc != nil { + if _, err := uc(object.Pointer, objectError); err != nil { + return err + } + } else { + if err := dc(object.Pointer, nil, objectError); err != nil { + return err + } + } + continue + } - content, err := transferAdapter.Download(ctx, result.Objects[0]) - if err != nil { - return nil, err + if uc != nil { + if len(object.Actions) == 0 { + log.Trace("%v already present on server", object.Pointer) + continue + } + + link, ok := object.Actions["upload"] + if !ok { + log.Debug("%+v", object) + return errors.New("Missing action 'upload'") + } + + content, err := uc(object.Pointer, nil) + if err != nil { + return err + } + + err = transferAdapter.Upload(ctx, link, object.Pointer, 
content) + + content.Close() + + if err != nil { + return err + } + + link, ok = object.Actions["verify"] + if ok { + if err := transferAdapter.Verify(ctx, link, object.Pointer); err != nil { + return err + } + } + } else { + link, ok := object.Actions["download"] + if !ok { + log.Debug("%+v", object) + return errors.New("Missing action 'download'") + } + + content, err := transferAdapter.Download(ctx, link) + if err != nil { + return err + } + + if err := dc(object.Pointer, content, nil); err != nil { + return err + } + } } - return content, nil + + return nil } diff --git a/modules/lfs/http_client_test.go b/modules/lfs/http_client_test.go index 043aa0214e86..0f633ede54cd 100644 --- a/modules/lfs/http_client_test.go +++ b/modules/lfs/http_client_test.go @@ -7,13 +7,13 @@ package lfs import ( "bytes" "context" - "encoding/json" "io" "io/ioutil" "net/http" "strings" "testing" + jsoniter "github.com/json-iterator/go" "github.com/stretchr/testify/assert" ) @@ -30,69 +30,253 @@ func (a *DummyTransferAdapter) Name() string { return "dummy" } -func (a *DummyTransferAdapter) Download(ctx context.Context, r *ObjectResponse) (io.ReadCloser, error) { +func (a *DummyTransferAdapter) Download(ctx context.Context, l *Link) (io.ReadCloser, error) { return ioutil.NopCloser(bytes.NewBufferString("dummy")), nil } -func TestHTTPClientDownload(t *testing.T) { - oid := "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab041" - size := int64(6) +func (a *DummyTransferAdapter) Upload(ctx context.Context, l *Link, p Pointer, r io.Reader) error { + return nil +} + +func (a *DummyTransferAdapter) Verify(ctx context.Context, l *Link, p Pointer) error { + return nil +} + +func lfsTestRoundtripHandler(req *http.Request) *http.Response { + var batchResponse *BatchResponse + url := req.URL.String() - roundTripHandler := func(req *http.Request) *http.Response { - url := req.URL.String() - if strings.Contains(url, "status-not-ok") { - return &http.Response{StatusCode: http.StatusBadRequest} + if strings.Contains(url, "status-not-ok") { + return &http.Response{StatusCode: http.StatusBadRequest} + } else if strings.Contains(url, "invalid-json-response") { + return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewBufferString("invalid json"))} + } else if strings.Contains(url, "valid-batch-request-download") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "download": {}, + }, + }, + }, + } + } else if strings.Contains(url, "valid-batch-request-upload") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "upload": {}, + }, + }, + }, } - if strings.Contains(url, "invalid-json-response") { - return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewBufferString("invalid json"))} + } else if strings.Contains(url, "response-no-objects") { + batchResponse = &BatchResponse{Transfer: "dummy"} + } else if strings.Contains(url, "unknown-transfer-adapter") { + batchResponse = &BatchResponse{Transfer: "unknown_adapter"} + } else if strings.Contains(url, "error-in-response-objects") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Error: &ObjectError{ + Code: 404, + Message: "Object not found", + }, + }, + }, } - if strings.Contains(url, "valid-batch-request-download") { - assert.Equal(t, "POST", req.Method) - assert.Equal(t, MediaType, req.Header.Get("Content-type"), "case %s: error should 
match", url) - assert.Equal(t, MediaType, req.Header.Get("Accept"), "case %s: error should match", url) + } else if strings.Contains(url, "empty-actions-map") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{}, + }, + }, + } + } else if strings.Contains(url, "download-actions-map") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "download": {}, + }, + }, + }, + } + } else if strings.Contains(url, "upload-actions-map") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "upload": {}, + }, + }, + }, + } + } else if strings.Contains(url, "verify-actions-map") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "verify": {}, + }, + }, + }, + } + } else if strings.Contains(url, "unknown-actions-map") { + batchResponse = &BatchResponse{ + Transfer: "dummy", + Objects: []*ObjectResponse{ + { + Actions: map[string]*Link{ + "unknown": {}, + }, + }, + }, + } + } else { + return nil + } - var batchRequest BatchRequest - err := json.NewDecoder(req.Body).Decode(&batchRequest) - assert.NoError(t, err) + payload := new(bytes.Buffer) + jsoniter.NewEncoder(payload).Encode(batchResponse) - assert.Equal(t, "download", batchRequest.Operation) - assert.Equal(t, 1, len(batchRequest.Objects)) - assert.Equal(t, oid, batchRequest.Objects[0].Oid) - assert.Equal(t, size, batchRequest.Objects[0].Size) + return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(payload)} +} - batchResponse := &BatchResponse{ - Transfer: "dummy", - Objects: make([]*ObjectResponse, 1), - } +func TestHTTPClientDownload(t *testing.T) { + p := Pointer{Oid: "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab041", Size: 6} - payload := new(bytes.Buffer) - json.NewEncoder(payload).Encode(batchResponse) + hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response { + assert.Equal(t, "POST", req.Method) + assert.Equal(t, MediaType, req.Header.Get("Content-type")) + assert.Equal(t, MediaType, req.Header.Get("Accept")) - return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(payload)} - } - if strings.Contains(url, "invalid-response-no-objects") { - batchResponse := &BatchResponse{Transfer: "dummy"} + var batchRequest BatchRequest + err := jsoniter.NewDecoder(req.Body).Decode(&batchRequest) + assert.NoError(t, err) - payload := new(bytes.Buffer) - json.NewEncoder(payload).Encode(batchResponse) + assert.Equal(t, "download", batchRequest.Operation) + assert.Equal(t, 1, len(batchRequest.Objects)) + assert.Equal(t, p.Oid, batchRequest.Objects[0].Oid) + assert.Equal(t, p.Size, batchRequest.Objects[0].Size) - return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(payload)} - } - if strings.Contains(url, "unknown-transfer-adapter") { - batchResponse := &BatchResponse{Transfer: "unknown_adapter"} + return lfsTestRoundtripHandler(req) + })} + dummy := &DummyTransferAdapter{} - payload := new(bytes.Buffer) - json.NewEncoder(payload).Encode(batchResponse) + var cases = []struct { + endpoint string + expectederror string + }{ + // case 0 + { + endpoint: "https://status-not-ok.io", + expectederror: "Unexpected server response: ", + }, + // case 1 + { + endpoint: "https://invalid-json-response.io", + expectederror: "invalid json", + }, + // case 2 + { + endpoint: 
"https://valid-batch-request-download.io", + expectederror: "", + }, + // case 3 + { + endpoint: "https://response-no-objects.io", + expectederror: "", + }, + // case 4 + { + endpoint: "https://unknown-transfer-adapter.io", + expectederror: "TransferAdapter not found: ", + }, + // case 5 + { + endpoint: "https://error-in-response-objects.io", + expectederror: "Object not found", + }, + // case 6 + { + endpoint: "https://empty-actions-map.io", + expectederror: "Missing action 'download'", + }, + // case 7 + { + endpoint: "https://download-actions-map.io", + expectederror: "", + }, + // case 8 + { + endpoint: "https://upload-actions-map.io", + expectederror: "Missing action 'download'", + }, + // case 9 + { + endpoint: "https://verify-actions-map.io", + expectederror: "Missing action 'download'", + }, + // case 10 + { + endpoint: "https://unknown-actions-map.io", + expectederror: "Missing action 'download'", + }, + } - return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(payload)} + for n, c := range cases { + client := &HTTPClient{ + client: hc, + endpoint: c.endpoint, + transfers: make(map[string]TransferAdapter), } + client.transfers["dummy"] = dummy - t.Errorf("Unknown test case: %s", url) - - return nil + err := client.Download(context.Background(), []Pointer{p}, func(p Pointer, content io.ReadCloser, objectError error) error { + if objectError != nil { + return objectError + } + b, err := io.ReadAll(content) + assert.NoError(t, err) + assert.Equal(t, []byte("dummy"), b) + return nil + }) + if len(c.expectederror) > 0 { + assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) + } else { + assert.NoError(t, err, "case %d", n) + } } +} + +func TestHTTPClientUpload(t *testing.T) { + p := Pointer{Oid: "fb8f7d8435968c4f82a726a92395be4d16f2f63116caf36c8ad35c60831ab041", Size: 6} + + hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response { + assert.Equal(t, "POST", req.Method) + assert.Equal(t, MediaType, req.Header.Get("Content-type")) + assert.Equal(t, MediaType, req.Header.Get("Accept")) + + var batchRequest BatchRequest + err := jsoniter.NewDecoder(req.Body).Decode(&batchRequest) + assert.NoError(t, err) - hc := &http.Client{Transport: RoundTripFunc(roundTripHandler)} + assert.Equal(t, "upload", batchRequest.Operation) + assert.Equal(t, 1, len(batchRequest.Objects)) + assert.Equal(t, p.Oid, batchRequest.Objects[0].Oid) + assert.Equal(t, p.Size, batchRequest.Objects[0].Size) + + return lfsTestRoundtripHandler(req) + })} dummy := &DummyTransferAdapter{} var cases = []struct { @@ -102,27 +286,57 @@ func TestHTTPClientDownload(t *testing.T) { // case 0 { endpoint: "https://status-not-ok.io", - expectederror: "Unexpected servers response: ", + expectederror: "Unexpected server response: ", }, // case 1 { endpoint: "https://invalid-json-response.io", - expectederror: "json.Decode: ", + expectederror: "invalid json", }, // case 2 { - endpoint: "https://valid-batch-request-download.io", + endpoint: "https://valid-batch-request-upload.io", expectederror: "", }, // case 3 { - endpoint: "https://invalid-response-no-objects.io", - expectederror: "No objects in result", + endpoint: "https://response-no-objects.io", + expectederror: "", }, // case 4 { endpoint: "https://unknown-transfer-adapter.io", - expectederror: "Transferadapter not found: ", + expectederror: "TransferAdapter not found: ", + }, + // case 5 + { + endpoint: "https://error-in-response-objects.io", + 
expectederror: "Object not found", + }, + // case 6 + { + endpoint: "https://empty-actions-map.io", + expectederror: "", + }, + // case 7 + { + endpoint: "https://download-actions-map.io", + expectederror: "Missing action 'upload'", + }, + // case 8 + { + endpoint: "https://upload-actions-map.io", + expectederror: "", + }, + // case 9 + { + endpoint: "https://verify-actions-map.io", + expectederror: "Missing action 'upload'", + }, + // case 10 + { + endpoint: "https://unknown-actions-map.io", + expectederror: "Missing action 'upload'", }, } @@ -134,7 +348,9 @@ func TestHTTPClientDownload(t *testing.T) { } client.transfers["dummy"] = dummy - _, err := client.Download(context.Background(), oid, size) + err := client.Upload(context.Background(), []Pointer{p}, func(p Pointer, objectError error) (io.ReadCloser, error) { + return ioutil.NopCloser(new(bytes.Buffer)), objectError + }) if len(c.expectederror) > 0 { assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) } else { diff --git a/modules/lfs/pointer_test.go b/modules/lfs/pointer_test.go index 0ed6df2c6da1..9cd8b15c9e04 100644 --- a/modules/lfs/pointer_test.go +++ b/modules/lfs/pointer_test.go @@ -15,16 +15,16 @@ import ( func TestStringContent(t *testing.T) { p := Pointer{Oid: "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", Size: 1234} expected := "version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 1234\n" - assert.Equal(t, p.StringContent(), expected) + assert.Equal(t, expected, p.StringContent()) } func TestRelativePath(t *testing.T) { p := Pointer{Oid: "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393"} expected := path.Join("4d", "7a", "214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393") - assert.Equal(t, p.RelativePath(), expected) + assert.Equal(t, expected, p.RelativePath()) p2 := Pointer{Oid: "4d7a"} - assert.Equal(t, p2.RelativePath(), "4d7a") + assert.Equal(t, "4d7a", p2.RelativePath()) } func TestIsValid(t *testing.T) { @@ -48,8 +48,8 @@ func TestGeneratePointer(t *testing.T) { p, err := GeneratePointer(strings.NewReader("Gitea")) assert.NoError(t, err) assert.True(t, p.IsValid()) - assert.Equal(t, p.Oid, "94cb57646c54a297c9807697e80a30946f79a4b82cb079d2606847825b1812cc") - assert.Equal(t, p.Size, int64(5)) + assert.Equal(t, "94cb57646c54a297c9807697e80a30946f79a4b82cb079d2606847825b1812cc", p.Oid) + assert.Equal(t, int64(5), p.Size) } func TestReadPointerFromBuffer(t *testing.T) { @@ -84,20 +84,20 @@ func TestReadPointerFromBuffer(t *testing.T) { p, err = ReadPointerFromBuffer([]byte("version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 1234\n")) assert.NoError(t, err) assert.True(t, p.IsValid()) - assert.Equal(t, p.Oid, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393") - assert.Equal(t, p.Size, int64(1234)) + assert.Equal(t, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", p.Oid) + assert.Equal(t, int64(1234), p.Size) p, err = ReadPointerFromBuffer([]byte("version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 1234\ntest")) assert.NoError(t, err) assert.True(t, p.IsValid()) - assert.Equal(t, p.Oid, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393") - assert.Equal(t, p.Size, int64(1234)) + assert.Equal(t, 
"4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", p.Oid) + assert.Equal(t, int64(1234), p.Size) } func TestReadPointer(t *testing.T) { p, err := ReadPointer(strings.NewReader("version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 1234\n")) assert.NoError(t, err) assert.True(t, p.IsValid()) - assert.Equal(t, p.Oid, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393") - assert.Equal(t, p.Size, int64(1234)) + assert.Equal(t, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", p.Oid) + assert.Equal(t, int64(1234), p.Size) } diff --git a/modules/lfs/shared.go b/modules/lfs/shared.go index 70b76d7512d5..d010b05ee56a 100644 --- a/modules/lfs/shared.go +++ b/modules/lfs/shared.go @@ -45,18 +45,18 @@ type BatchResponse struct { // ObjectResponse is object metadata as seen by clients of the LFS server. type ObjectResponse struct { Pointer - Actions map[string]*Link `json:"actions"` + Actions map[string]*Link `json:"actions,omitempty"` Error *ObjectError `json:"error,omitempty"` } -// Link provides a structure used to build a hypermedia representation of an HTTP link. +// Link provides a structure with information about how to access a object. type Link struct { Href string `json:"href"` Header map[string]string `json:"header,omitempty"` - ExpiresAt time.Time `json:"expires_at,omitempty"` + ExpiresAt *time.Time `json:"expires_at,omitempty"` } -// ObjectError defines the JSON structure returned to the client in case of an error +// ObjectError defines the JSON structure returned to the client in case of an error. type ObjectError struct { Code int `json:"code"` Message string `json:"message"` @@ -67,3 +67,10 @@ type PointerBlob struct { Hash string Pointer } + +// ErrorResponse describes the error to the client. 
+type ErrorResponse struct { + Message string + DocumentationURL string `json:"documentation_url,omitempty"` + RequestID string `json:"request_id,omitempty"` +} diff --git a/modules/lfs/transferadapter.go b/modules/lfs/transferadapter.go index ea3aff0000b9..8c40ab8c0446 100644 --- a/modules/lfs/transferadapter.go +++ b/modules/lfs/transferadapter.go @@ -5,18 +5,24 @@ package lfs import ( + "bytes" "context" "errors" "fmt" "io" "net/http" + + "code.gitea.io/gitea/modules/log" + + jsoniter "github.com/json-iterator/go" ) // TransferAdapter represents an adapter for downloading/uploading LFS objects type TransferAdapter interface { Name() string - Download(ctx context.Context, r *ObjectResponse) (io.ReadCloser, error) - //Upload(ctx context.Context, reader io.Reader) error + Download(ctx context.Context, l *Link) (io.ReadCloser, error) + Upload(ctx context.Context, l *Link, p Pointer, r io.Reader) error + Verify(ctx context.Context, l *Link, p Pointer) error } // BasicTransferAdapter implements the "basic" adapter @@ -30,29 +36,101 @@ func (a *BasicTransferAdapter) Name() string { } // Download reads the download location and downloads the data -func (a *BasicTransferAdapter) Download(ctx context.Context, r *ObjectResponse) (io.ReadCloser, error) { - download, ok := r.Actions["download"] - if !ok { - return nil, errors.New("lfs.BasicTransferAdapter.Download: Action 'download' not found") +func (a *BasicTransferAdapter) Download(ctx context.Context, l *Link) (io.ReadCloser, error) { + resp, err := a.performRequest(ctx, "GET", l, nil, nil) + if err != nil { + return nil, err } + return resp.Body, nil +} - req, err := http.NewRequestWithContext(ctx, "GET", download.Href, nil) +// Upload sends the content to the LFS server +func (a *BasicTransferAdapter) Upload(ctx context.Context, l *Link, p Pointer, r io.Reader) error { + _, err := a.performRequest(ctx, "PUT", l, r, func(req *http.Request) { + if len(req.Header.Get("Content-Type")) == 0 { + req.Header.Set("Content-Type", "application/octet-stream") + } + + if req.Header.Get("Transfer-Encoding") == "chunked" { + req.TransferEncoding = []string{"chunked"} + } + + req.ContentLength = p.Size + }) if err != nil { - return nil, fmt.Errorf("lfs.BasicTransferAdapter.Download http.NewRequestWithContext: %w", err) + return err } - for key, value := range download.Header { + return nil +} + +// Verify calls the verify handler on the LFS server +func (a *BasicTransferAdapter) Verify(ctx context.Context, l *Link, p Pointer) error { + b, err := jsoniter.Marshal(p) + if err != nil { + log.Error("Error encoding json: %v", err) + return err + } + + _, err = a.performRequest(ctx, "POST", l, bytes.NewReader(b), func(req *http.Request) { + req.Header.Set("Content-Type", MediaType) + }) + if err != nil { + return err + } + return nil +} + +func (a *BasicTransferAdapter) performRequest(ctx context.Context, method string, l *Link, body io.Reader, callback func(*http.Request)) (*http.Response, error) { + log.Trace("Calling: %s %s", method, l.Href) + + req, err := http.NewRequestWithContext(ctx, method, l.Href, body) + if err != nil { + log.Error("Error creating request: %v", err) + return nil, err + } + for key, value := range l.Header { req.Header.Set(key, value) } + req.Header.Set("Accept", MediaType) + + if callback != nil { + callback(req) + } res, err := a.client.Do(req) if err != nil { select { case <-ctx.Done(): - return nil, ctx.Err() + return res, ctx.Err() default: } - return nil, fmt.Errorf("lfs.BasicTransferAdapter.Download http.Do: %w", err) + 
log.Error("Error while processing request: %v", err) + return res, err + } + + if res.StatusCode != http.StatusOK { + return res, handleErrorResponse(res) + } + + return res, nil +} + +func handleErrorResponse(resp *http.Response) error { + defer resp.Body.Close() + + er, err := decodeReponseError(resp.Body) + if err != nil { + return fmt.Errorf("Request failed with status %s", resp.Status) } + log.Trace("ErrorRespone: %v", er) + return errors.New(er.Message) +} - return res.Body, nil +func decodeReponseError(r io.Reader) (ErrorResponse, error) { + var er ErrorResponse + err := jsoniter.NewDecoder(r).Decode(&er) + if err != nil { + log.Error("Error decoding json: %v", err) + } + return er, err } diff --git a/modules/lfs/transferadapter_test.go b/modules/lfs/transferadapter_test.go index 0eabd3faeee1..7dfdad417ea5 100644 --- a/modules/lfs/transferadapter_test.go +++ b/modules/lfs/transferadapter_test.go @@ -7,11 +7,13 @@ package lfs import ( "bytes" "context" + "io" "io/ioutil" "net/http" "strings" "testing" + jsoniter "github.com/json-iterator/go" "github.com/stretchr/testify/assert" ) @@ -21,58 +23,151 @@ func TestBasicTransferAdapterName(t *testing.T) { assert.Equal(t, "basic", a.Name()) } -func TestBasicTransferAdapterDownload(t *testing.T) { +func TestBasicTransferAdapter(t *testing.T) { + p := Pointer{Oid: "b5a2c96250612366ea272ffac6d9744aaf4b45aacd96aa7cfcb931ee3b558259", Size: 5} + roundTripHandler := func(req *http.Request) *http.Response { + assert.Equal(t, MediaType, req.Header.Get("Accept")) + assert.Equal(t, "test-value", req.Header.Get("test-header")) + url := req.URL.String() - if strings.Contains(url, "valid-download-request") { + if strings.Contains(url, "download-request") { assert.Equal(t, "GET", req.Method) - assert.Equal(t, "test-value", req.Header.Get("test-header")) return &http.Response{StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewBufferString("dummy"))} - } + } else if strings.Contains(url, "upload-request") { + assert.Equal(t, "PUT", req.Method) + assert.Equal(t, "application/octet-stream", req.Header.Get("Content-Type")) + + b, err := io.ReadAll(req.Body) + assert.NoError(t, err) + assert.Equal(t, "dummy", string(b)) - t.Errorf("Unknown test case: %s", url) + return &http.Response{StatusCode: http.StatusOK} + } else if strings.Contains(url, "verify-request") { + assert.Equal(t, "POST", req.Method) + assert.Equal(t, MediaType, req.Header.Get("Content-Type")) - return nil + var vp Pointer + err := jsoniter.NewDecoder(req.Body).Decode(&vp) + assert.NoError(t, err) + assert.Equal(t, p.Oid, vp.Oid) + assert.Equal(t, p.Size, vp.Size) + + return &http.Response{StatusCode: http.StatusOK} + } else if strings.Contains(url, "error-response") { + er := &ErrorResponse{ + Message: "Object not found", + } + payload := new(bytes.Buffer) + jsoniter.NewEncoder(payload).Encode(er) + + return &http.Response{StatusCode: http.StatusNotFound, Body: ioutil.NopCloser(payload)} + } else { + t.Errorf("Unknown test case: %s", url) + return nil + } } hc := &http.Client{Transport: RoundTripFunc(roundTripHandler)} a := &BasicTransferAdapter{hc} - var cases = []struct { - response *ObjectResponse - expectederror string - }{ - // case 0 - { - response: &ObjectResponse{}, - expectederror: "Action 'download' not found", - }, - // case 1 - { - response: &ObjectResponse{ - Actions: map[string]*Link{"upload": nil}, + t.Run("Download", func(t *testing.T) { + cases := []struct { + link *Link + expectederror string + }{ + // case 0 + { + link: &Link{ + Href: "https://download-request.io", + 
Header: map[string]string{"test-header": "test-value"}, + }, + expectederror: "", }, - expectederror: "Action 'download' not found", - }, - // case 2 - { - response: &ObjectResponse{ - Actions: map[string]*Link{"download": { - Href: "https://valid-download-request.io", + // case 1 + { + link: &Link{ + Href: "https://error-response.io", Header: map[string]string{"test-header": "test-value"}, - }}, + }, + expectederror: "Object not found", }, - expectederror: "", - }, - } + } - for n, c := range cases { - _, err := a.Download(context.Background(), c.response) - if len(c.expectederror) > 0 { - assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) - } else { - assert.NoError(t, err, "case %d", n) + for n, c := range cases { + _, err := a.Download(context.Background(), c.link) + if len(c.expectederror) > 0 { + assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) + } else { + assert.NoError(t, err, "case %d", n) + } } - } + }) + + t.Run("Upload", func(t *testing.T) { + cases := []struct { + link *Link + expectederror string + }{ + // case 0 + { + link: &Link{ + Href: "https://upload-request.io", + Header: map[string]string{"test-header": "test-value"}, + }, + expectederror: "", + }, + // case 1 + { + link: &Link{ + Href: "https://error-response.io", + Header: map[string]string{"test-header": "test-value"}, + }, + expectederror: "Object not found", + }, + } + + for n, c := range cases { + err := a.Upload(context.Background(), c.link, p, bytes.NewBufferString("dummy")) + if len(c.expectederror) > 0 { + assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) + } else { + assert.NoError(t, err, "case %d", n) + } + } + }) + + t.Run("Verify", func(t *testing.T) { + cases := []struct { + link *Link + expectederror string + }{ + // case 0 + { + link: &Link{ + Href: "https://verify-request.io", + Header: map[string]string{"test-header": "test-value"}, + }, + expectederror: "", + }, + // case 1 + { + link: &Link{ + Href: "https://error-response.io", + Header: map[string]string{"test-header": "test-value"}, + }, + expectederror: "Object not found", + }, + } + + for n, c := range cases { + err := a.Verify(context.Background(), c.link, p) + if len(c.expectederror) > 0 { + assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror) + } else { + assert.NoError(t, err, "case %d", n) + } + } + }) } diff --git a/modules/log/colors_router.go b/modules/log/colors_router.go index e291a0da99da..3064e005cfa9 100644 --- a/modules/log/colors_router.go +++ b/modules/log/colors_router.go @@ -19,7 +19,7 @@ var statusToColor = map[int][]byte{ 500: ColorBytes(Bold, BgRed), } -// ColoredStatus addes colors for HTTP status +// ColoredStatus adds colors for HTTP status func ColoredStatus(status int, s ...string) *ColoredValue { color, ok := statusToColor[status] if !ok { @@ -43,7 +43,7 @@ var methodToColor = map[string][]byte{ "HEAD": ColorBytes(FgBlue, Faint), } -// ColoredMethod addes colors for HtTP methos on log +// ColoredMethod adds colors for HTTP methods on log func ColoredMethod(method string) *ColoredValue { color, ok := methodToColor[method] if !ok { @@ -72,7 +72,7 @@ var ( wayTooLong = ColorBytes(BgMagenta) ) -// ColoredTime addes colors for time on log +// ColoredTime adds colors for time on log func 
ColoredTime(duration time.Duration) *ColoredValue { for i, k := range durations { if duration < k { diff --git a/modules/log/console_test.go b/modules/log/console_test.go index e7ed07123b06..4da87b48a309 100644 --- a/modules/log/console_test.go +++ b/modules/log/console_test.go @@ -48,7 +48,7 @@ func TestConsoleLoggerMinimalConfig(t *testing.T) { assert.Equal(t, prefix, realCW.Prefix) assert.Equal(t, "", string(written)) cw.Close() - assert.Equal(t, false, closed) + assert.False(t, closed) } } @@ -97,20 +97,20 @@ func TestConsoleLogger(t *testing.T) { expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) cw.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = DEBUG expected = "" cw.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) event.level = TRACE expected = "" cw.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) nonMatchEvent := Event{ level: INFO, @@ -124,15 +124,15 @@ func TestConsoleLogger(t *testing.T) { expected = "" cw.LogEvent(&nonMatchEvent) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) event.level = WARN expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) cw.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] cw.Close() - assert.Equal(t, false, closed) + assert.False(t, closed) } diff --git a/modules/log/file.go b/modules/log/file.go index d5b38d4e0172..79cbe740fdb8 100644 --- a/modules/log/file.go +++ b/modules/log/file.go @@ -177,7 +177,7 @@ func (log *FileLogger) DoRotate() error { // close fd before rename // Rename the file to its newfound home - if err = os.Rename(log.Filename, fname); err != nil { + if err = util.Rename(log.Filename, fname); err != nil { return fmt.Errorf("Rotate: %v", err) } diff --git a/modules/log/file_test.go b/modules/log/file_test.go index af6fbcb29d11..7bc5f90037a7 100644 --- a/modules/log/file_test.go +++ b/modules/log/file_test.go @@ -30,7 +30,7 @@ func TestFileLoggerFails(t *testing.T) { fileLogger := NewFileLogger() //realFileLogger, ok := fileLogger.(*FileLogger) - //assert.Equal(t, true, ok) + //assert.True(t, ok) // Fail if there is bad json err = fileLogger.Init("{") @@ -161,7 +161,7 @@ func TestCompressFileLogger(t *testing.T) { fileLogger := NewFileLogger() realFileLogger, ok := fileLogger.(*FileLogger) - assert.Equal(t, true, ok) + assert.True(t, ok) location, _ := time.LoadLocation("EST") diff --git a/modules/log/log_test.go b/modules/log/log_test.go index 810505dea5b4..d14daa5a206d 100644 --- a/modules/log/log_test.go +++ b/modules/log/log_test.go @@ -26,9 +26,9 @@ func baseConsoleTest(t *testing.T, logger *MultiChannelledLogger) (chan []byte, channelledLog := m.GetEventLogger("console") assert.NotEmpty(t, channelledLog) realChanLog, ok := channelledLog.(*ChannelledLog) - assert.Equal(t, true, ok) + assert.True(t, ok) realCL, ok := realChanLog.loggerProvider.(*ConsoleLogger) - assert.Equal(t, true, ok) + assert.True(t, ok) assert.Equal(t, INFO, realCL.Level) realCL.out = c @@ -38,20 +38,20 @@ func baseConsoleTest(t *testing.T, logger 
*MultiChannelledLogger) (chan []byte, logger.Log(0, INFO, format, args...) line := <-written assert.Contains(t, string(line), fmt.Sprintf(format, args...)) - assert.Equal(t, false, <-closed) + assert.False(t, <-closed) format = "test2: %s" logger.Warn(format, args...) line = <-written assert.Contains(t, string(line), fmt.Sprintf(format, args...)) - assert.Equal(t, false, <-closed) + assert.False(t, <-closed) format = "testerror: %s" logger.Error(format, args...) line = <-written assert.Contains(t, string(line), fmt.Sprintf(format, args...)) - assert.Equal(t, false, <-closed) + assert.False(t, <-closed) return written, closed } @@ -63,7 +63,7 @@ func TestNewLoggerUnexported(t *testing.T) { out := logger.MultiChannelledLog.GetEventLogger("console") assert.NotEmpty(t, out) chanlog, ok := out.(*ChannelledLog) - assert.Equal(t, true, ok) + assert.True(t, ok) assert.Equal(t, "console", chanlog.provider) assert.Equal(t, INFO, logger.GetLevel()) baseConsoleTest(t, logger) @@ -74,11 +74,11 @@ func TestNewLoggger(t *testing.T) { logger := NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String())) assert.Equal(t, INFO, GetLevel()) - assert.Equal(t, false, IsTrace()) - assert.Equal(t, false, IsDebug()) - assert.Equal(t, true, IsInfo()) - assert.Equal(t, true, IsWarn()) - assert.Equal(t, true, IsError()) + assert.False(t, IsTrace()) + assert.False(t, IsDebug()) + assert.True(t, IsInfo()) + assert.True(t, IsWarn()) + assert.True(t, IsError()) written, closed := baseConsoleTest(t, logger) @@ -88,17 +88,17 @@ func TestNewLoggger(t *testing.T) { Log(0, INFO, format, args...) line := <-written assert.Contains(t, string(line), fmt.Sprintf(format, args...)) - assert.Equal(t, false, <-closed) + assert.False(t, <-closed) Info(format, args...) line = <-written assert.Contains(t, string(line), fmt.Sprintf(format, args...)) - assert.Equal(t, false, <-closed) + assert.False(t, <-closed) go DelLogger("console") line = <-written assert.Equal(t, "", string(line)) - assert.Equal(t, true, <-closed) + assert.True(t, <-closed) } func TestNewLogggerRecreate(t *testing.T) { @@ -106,11 +106,11 @@ func TestNewLogggerRecreate(t *testing.T) { NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String())) assert.Equal(t, INFO, GetLevel()) - assert.Equal(t, false, IsTrace()) - assert.Equal(t, false, IsDebug()) - assert.Equal(t, true, IsInfo()) - assert.Equal(t, true, IsWarn()) - assert.Equal(t, true, IsError()) + assert.False(t, IsTrace()) + assert.False(t, IsDebug()) + assert.True(t, IsInfo()) + assert.True(t, IsWarn()) + assert.True(t, IsError()) format := "test: %s" args := []interface{}{"A"} @@ -120,11 +120,11 @@ func TestNewLogggerRecreate(t *testing.T) { NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String())) assert.Equal(t, INFO, GetLevel()) - assert.Equal(t, false, IsTrace()) - assert.Equal(t, false, IsDebug()) - assert.Equal(t, true, IsInfo()) - assert.Equal(t, true, IsWarn()) - assert.Equal(t, true, IsError()) + assert.False(t, IsTrace()) + assert.False(t, IsDebug()) + assert.True(t, IsInfo()) + assert.True(t, IsWarn()) + assert.True(t, IsError()) Log(0, INFO, format, args...) 
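The test hunks in this part of the diff converge on testify's dedicated helpers instead of assert.Equal; a tiny illustrative test (package, names, and values are invented) of that pattern:

package example // hypothetical package, for illustration only

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestAssertHelpers mirrors the refactor applied in the hunks above: dedicated
// helpers replace assert.Equal for booleans and lengths, and assert.Equal keeps
// the expected value first, which gives clearer failure messages.
func TestAssertHelpers(t *testing.T) {
	closed := false
	langs := []string{"Go", "Makefile"}

	assert.False(t, closed)         // instead of assert.Equal(t, false, closed)
	assert.Len(t, langs, 2)         // instead of assert.EqualValues(t, 2, len(langs))
	assert.Equal(t, "Go", langs[0]) // expected value first, as in pointer_test.go
}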
@@ -150,5 +150,5 @@ func TestNewNamedLogger(t *testing.T) { go DelNamedLogger("test") line := <-written assert.Equal(t, "", string(line)) - assert.Equal(t, true, <-closed) + assert.True(t, <-closed) } diff --git a/modules/log/smtp_test.go b/modules/log/smtp_test.go index 216d55521549..c8758bf6bece 100644 --- a/modules/log/smtp_test.go +++ b/modules/log/smtp_test.go @@ -26,7 +26,7 @@ func TestSMTPLogger(t *testing.T) { logger := NewSMTPLogger() smtpLogger, ok := logger.(*SMTPLogger) - assert.Equal(t, true, ok) + assert.True(t, ok) err := logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"username\":\"%s\",\"password\":\"%s\",\"host\":\"%s\",\"subject\":\"%s\",\"sendTos\":[\"%s\",\"%s\"]}", prefix, level.String(), flags, username, password, host, subject, sendTos[0], sendTos[1])) assert.NoError(t, err) diff --git a/modules/log/writer_test.go b/modules/log/writer_test.go index 886dd58fb38f..99a5fd340689 100644 --- a/modules/log/writer_test.go +++ b/modules/log/writer_test.go @@ -64,44 +64,44 @@ func TestBaseLogger(t *testing.T) { expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = DEBUG expected = "" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) event.level = TRACE expected = "" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) event.level = WARN expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = ERROR expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = CRITICAL expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] b.Close() - assert.Equal(t, true, closed) + assert.True(t, closed) } func TestBaseLoggerDated(t *testing.T) { @@ -142,46 +142,46 @@ func TestBaseLoggerDated(t *testing.T) { expected := fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = INFO expected = "" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = ERROR expected = fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, 
closed) + assert.False(t, closed) written = written[:0] event.level = DEBUG expected = "" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = CRITICAL expected = fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg) b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.level = TRACE expected = "" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] b.Close() - assert.Equal(t, true, closed) + assert.True(t, closed) } func TestBaseLoggerMultiLineNoFlagsRegexp(t *testing.T) { @@ -222,20 +222,20 @@ func TestBaseLoggerMultiLineNoFlagsRegexp(t *testing.T) { expected := "TEST\n\tMESSAGE\n\tTEST\n" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.filename = "ELSEWHERE" b.LogEvent(&event) assert.Equal(t, "", string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event.caller = "FILENAME" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] event = Event{ @@ -249,9 +249,8 @@ func TestBaseLoggerMultiLineNoFlagsRegexp(t *testing.T) { expected = "TEST\n\tFILENAME\n\tTEST\n" b.LogEvent(&event) assert.Equal(t, expected, string(written)) - assert.Equal(t, false, closed) + assert.False(t, closed) written = written[:0] - } func TestBrokenRegexp(t *testing.T) { @@ -273,5 +272,5 @@ func TestBrokenRegexp(t *testing.T) { b.NewWriterLogger(c) assert.Empty(t, b.regexp) b.Close() - assert.Equal(t, true, closed) + assert.True(t, closed) } diff --git a/modules/markup/csv/csv.go b/modules/markup/csv/csv.go index 6572b0ee1e81..8a4df8951154 100644 --- a/modules/markup/csv/csv.go +++ b/modules/markup/csv/csv.go @@ -10,6 +10,7 @@ import ( "html" "io" "io/ioutil" + "regexp" "strconv" "code.gitea.io/gitea/modules/csv" @@ -38,6 +39,15 @@ func (Renderer) Extensions() []string { return []string{".csv", ".tsv"} } +// SanitizerRules implements markup.Renderer +func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule { + return []setting.MarkupSanitizerRule{ + {Element: "table", AllowAttr: "class", Regexp: regexp.MustCompile(`data-table`)}, + {Element: "th", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)}, + {Element: "td", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)}, + } +} + func writeField(w io.Writer, element, class, field string) error { if _, err := io.WriteString(w, "<"); err != nil { return err diff --git a/modules/markup/external/external.go b/modules/markup/external/external.go index 62814c9914b9..52139f5a49c3 100644 --- a/modules/markup/external/external.go +++ b/modules/markup/external/external.go @@ -5,6 +5,7 @@ package external import ( + "context" "fmt" "io" "io/ioutil" @@ -15,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" ) @@ -30,7 +32,7 @@ func RegisterRenderers() { // Renderer implements markup.Renderer for external tools type Renderer struct { - setting.MarkupRenderer + *setting.MarkupRenderer } // Name returns the external tool name @@ 
-48,6 +50,11 @@ func (p *Renderer) Extensions() []string { return p.FileExtensions } +// SanitizerRules implements markup.Renderer +func (p *Renderer) SanitizerRules() []setting.MarkupSanitizerRule { + return p.MarkupSanitizerRules +} + func envMark(envName string) string { if runtime.GOOS == "windows" { return "%" + envName + "%" @@ -91,7 +98,17 @@ func (p *Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io. args = append(args, f.Name()) } - cmd := exec.Command(commands[0], args...) + if ctx == nil || ctx.Ctx == nil { + return fmt.Errorf("RenderContext did not provide context") + } + + processCtx, cancel := context.WithCancel(ctx.Ctx) + defer cancel() + + pid := process.GetManager().Add(fmt.Sprintf("Render [%s] for %s", commands[0], ctx.URLPrefix), cancel) + defer process.GetManager().Remove(pid) + + cmd := exec.CommandContext(processCtx, commands[0], args...) cmd.Env = append( os.Environ(), "GITEA_PREFIX_SRC="+ctx.URLPrefix, diff --git a/modules/markup/html.go b/modules/markup/html.go index 7c4c10ee2210..6d0b4fbea2f3 100644 --- a/modules/markup/html.go +++ b/modules/markup/html.go @@ -6,7 +6,6 @@ package markup import ( "bytes" - "fmt" "io" "io/ioutil" "net/url" @@ -66,7 +65,7 @@ var ( blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`) // EmojiShortCodeRegex find emoji by alias like :smile: - EmojiShortCodeRegex = regexp.MustCompile(`\:[\w\+\-]+\:{1}`) + EmojiShortCodeRegex = regexp.MustCompile(`:[\w\+\-]+:`) ) // CSS class for action keywords (e.g. "closes: #1") @@ -89,6 +88,7 @@ func isLinkStr(link string) bool { return validLinksPattern.MatchString(link) } +// FIXME: This function is not concurrent safe func getIssueFullPattern() *regexp.Regexp { if issueFullPattern == nil { issueFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) + @@ -274,7 +274,7 @@ func RenderDescriptionHTML( } // RenderEmoji for when we want to just process emoji and shortcodes -// in various places it isn't already run through the normal markdown procesor +// in various places it isn't already run through the normal markdown processor func RenderEmoji( content string, ) (string, error) { @@ -285,6 +285,7 @@ var tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM var nulCleaner = strings.NewReplacer("\000", "") func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error { + defer ctx.Cancel() // FIXME: don't read all content to memory rawHTML, err := ioutil.ReadAll(input) if err != nil { @@ -302,27 +303,26 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output _, _ = res.WriteString("") // parse the HTML - nodes, err := html.ParseFragment(res, nil) + node, err := html.Parse(res) if err != nil { return &postProcessError{"invalid HTML", err} } - for _, node := range nodes { - visitNode(ctx, procs, node, true) + if node.Type == html.DocumentNode { + node = node.FirstChild } - newNodes := make([]*html.Node, 0, len(nodes)) + visitNode(ctx, procs, node, true) - for _, node := range nodes { - if node.Data == "html" { - node = node.FirstChild - for node != nil && node.Data != "body" { - node = node.NextSibling - } - } - if node == nil { - continue + newNodes := make([]*html.Node, 0, 5) + + if node.Data == "html" { + node = node.FirstChild + for node != nil && node.Data != "body" { + node = node.NextSibling } + } + if node != nil { if node.Data == "body" { child := node.FirstChild for child != nil { @@ -364,24 +364,20 @@ func visitNode(ctx *RenderContext, procs []processor, node 
*html.Node, visitText } case html.ElementNode: if node.Data == "img" { - attrs := node.Attr - for idx, attr := range attrs { + for i, attr := range node.Attr { if attr.Key != "src" { continue } - link := []byte(attr.Val) - if len(link) > 0 && !IsLink(link) { + if len(attr.Val) > 0 && !isLinkStr(attr.Val) && !strings.HasPrefix(attr.Val, "data:image/") { prefix := ctx.URLPrefix if ctx.IsWiki { prefix = util.URLJoin(prefix, "wiki", "raw") } prefix = strings.Replace(prefix, "/src/", "/media/", 1) - lnk := string(link) - lnk = util.URLJoin(prefix, lnk) - link = []byte(lnk) + attr.Val = util.URLJoin(prefix, attr.Val) } - node.Attr[idx].Val = string(link) + node.Attr[i] = attr } } else if node.Data == "a" { visitText = false @@ -464,17 +460,14 @@ func createEmoji(content, class, name string) *html.Node { return span } -func createCustomEmoji(alias, class string) *html.Node { - +func createCustomEmoji(alias string) *html.Node { span := &html.Node{ Type: html.ElementNode, Data: atom.Span.String(), Attr: []html.Attribute{}, } - if class != "" { - span.Attr = append(span.Attr, html.Attribute{Key: "class", Val: class}) - span.Attr = append(span.Attr, html.Attribute{Key: "aria-label", Val: alias}) - } + span.Attr = append(span.Attr, html.Attribute{Key: "class", Val: "emoji"}) + span.Attr = append(span.Attr, html.Attribute{Key: "aria-label", Val: alias}) img := &html.Node{ Type: html.ElementNode, @@ -482,10 +475,8 @@ func createCustomEmoji(alias, class string) *html.Node { Data: "img", Attr: []html.Attribute{}, } - if class != "" { - img.Attr = append(img.Attr, html.Attribute{Key: "alt", Val: fmt.Sprintf(`:%s:`, alias)}) - img.Attr = append(img.Attr, html.Attribute{Key: "src", Val: fmt.Sprintf(`%s/img/emoji/%s.png`, setting.StaticURLPrefix, alias)}) - } + img.Attr = append(img.Attr, html.Attribute{Key: "alt", Val: ":" + alias + ":"}) + img.Attr = append(img.Attr, html.Attribute{Key: "src", Val: setting.StaticURLPrefix + "/assets/img/emoji/" + alias + ".png"}) span.AppendChild(img) return span @@ -571,26 +562,38 @@ func replaceContentList(node *html.Node, i, j int, newNodes []*html.Node) { } func mentionProcessor(ctx *RenderContext, node *html.Node) { - // We replace only the first mention; other mentions will be addressed later - found, loc := references.FindFirstMentionBytes([]byte(node.Data)) - if !found { - return - } - mention := node.Data[loc.Start:loc.End] - var teams string - teams, ok := ctx.Metas["teams"] - // FIXME: util.URLJoin may not be necessary here: - // - setting.AppURL is defined to have a terminal '/' so unless mention[1:] - // is an AppSubURL link we can probably fallback to concatenation. 
- // team mention should follow @orgName/teamName style - if ok && strings.Contains(mention, "/") { - mentionOrgAndTeam := strings.Split(mention, "/") - if mentionOrgAndTeam[0][1:] == ctx.Metas["org"] && strings.Contains(teams, ","+strings.ToLower(mentionOrgAndTeam[1])+",") { - replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, "org", ctx.Metas["org"], "teams", mentionOrgAndTeam[1]), mention, "mention")) + start := 0 + next := node.NextSibling + for node != nil && node != next && start < len(node.Data) { + // We replace only the first mention; other mentions will be addressed later + found, loc := references.FindFirstMentionBytes([]byte(node.Data[start:])) + if !found { + return } - return + loc.Start += start + loc.End += start + mention := node.Data[loc.Start:loc.End] + var teams string + teams, ok := ctx.Metas["teams"] + // FIXME: util.URLJoin may not be necessary here: + // - setting.AppURL is defined to have a terminal '/' so unless mention[1:] + // is an AppSubURL link we can probably fallback to concatenation. + // team mention should follow @orgName/teamName style + if ok && strings.Contains(mention, "/") { + mentionOrgAndTeam := strings.Split(mention, "/") + if mentionOrgAndTeam[0][1:] == ctx.Metas["org"] && strings.Contains(teams, ","+strings.ToLower(mentionOrgAndTeam[1])+",") { + replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, "org", ctx.Metas["org"], "teams", mentionOrgAndTeam[1]), mention, "mention")) + node = node.NextSibling.NextSibling + start = 0 + continue + } + start = loc.End + continue + } + replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, mention[1:]), mention, "mention")) + node = node.NextSibling.NextSibling + start = 0 } - replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, mention[1:]), mention, "mention")) } func shortLinkProcessor(ctx *RenderContext, node *html.Node) { @@ -598,188 +601,196 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) { } func shortLinkProcessorFull(ctx *RenderContext, node *html.Node, noLink bool) { - m := shortLinkPattern.FindStringSubmatchIndex(node.Data) - if m == nil { - return - } + next := node.NextSibling + for node != nil && node != next { + m := shortLinkPattern.FindStringSubmatchIndex(node.Data) + if m == nil { + return + } - content := node.Data[m[2]:m[3]] - tail := node.Data[m[4]:m[5]] - props := make(map[string]string) - - // MediaWiki uses [[link|text]], while GitHub uses [[text|link]] - // It makes page handling terrible, but we prefer GitHub syntax - // And fall back to MediaWiki only when it is obvious from the look - // Of text and link contents - sl := strings.Split(content, "|") - for _, v := range sl { - if equalPos := strings.IndexByte(v, '='); equalPos == -1 { - // There is no equal in this argument; this is a mandatory arg - if props["name"] == "" { - if isLinkStr(v) { - // If we clearly see it is a link, we save it so - - // But first we need to ensure, that if both mandatory args provided - // look like links, we stick to GitHub syntax - if props["link"] != "" { - props["name"] = props["link"] - } + content := node.Data[m[2]:m[3]] + tail := node.Data[m[4]:m[5]] + props := make(map[string]string) + + // MediaWiki uses [[link|text]], while GitHub uses [[text|link]] + // It makes page handling terrible, but we prefer GitHub syntax + // And fall back to MediaWiki only when it is obvious from the look + // Of text and link contents + sl := strings.Split(content, "|") + for _, v := 
range sl { + if equalPos := strings.IndexByte(v, '='); equalPos == -1 { + // There is no equal in this argument; this is a mandatory arg + if props["name"] == "" { + if isLinkStr(v) { + // If we clearly see it is a link, we save it so + + // But first we need to ensure, that if both mandatory args provided + // look like links, we stick to GitHub syntax + if props["link"] != "" { + props["name"] = props["link"] + } - props["link"] = strings.TrimSpace(v) + props["link"] = strings.TrimSpace(v) + } else { + props["name"] = v + } } else { - props["name"] = v + props["link"] = strings.TrimSpace(v) } } else { - props["link"] = strings.TrimSpace(v) - } - } else { - // There is an equal; optional argument. - - sep := strings.IndexByte(v, '=') - key, val := v[:sep], html.UnescapeString(v[sep+1:]) - - // When parsing HTML, x/net/html will change all quotes which are - // not used for syntax into UTF-8 quotes. So checking val[0] won't - // be enough, since that only checks a single byte. - if len(val) > 1 { - if (strings.HasPrefix(val, "“") && strings.HasSuffix(val, "”")) || - (strings.HasPrefix(val, "‘") && strings.HasSuffix(val, "’")) { - const lenQuote = len("‘") - val = val[lenQuote : len(val)-lenQuote] - } else if (strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"")) || - (strings.HasPrefix(val, "'") && strings.HasSuffix(val, "'")) { - val = val[1 : len(val)-1] - } else if strings.HasPrefix(val, "'") && strings.HasSuffix(val, "’") { - const lenQuote = len("‘") - val = val[1 : len(val)-lenQuote] + // There is an equal; optional argument. + + sep := strings.IndexByte(v, '=') + key, val := v[:sep], html.UnescapeString(v[sep+1:]) + + // When parsing HTML, x/net/html will change all quotes which are + // not used for syntax into UTF-8 quotes. So checking val[0] won't + // be enough, since that only checks a single byte. 
+ if len(val) > 1 { + if (strings.HasPrefix(val, "“") && strings.HasSuffix(val, "”")) || + (strings.HasPrefix(val, "‘") && strings.HasSuffix(val, "’")) { + const lenQuote = len("‘") + val = val[lenQuote : len(val)-lenQuote] + } else if (strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"")) || + (strings.HasPrefix(val, "'") && strings.HasSuffix(val, "'")) { + val = val[1 : len(val)-1] + } else if strings.HasPrefix(val, "'") && strings.HasSuffix(val, "’") { + const lenQuote = len("‘") + val = val[1 : len(val)-lenQuote] + } } + props[key] = val } - props[key] = val } - } - var name, link string - if props["link"] != "" { - link = props["link"] - } else if props["name"] != "" { - link = props["name"] - } - if props["title"] != "" { - name = props["title"] - } else if props["name"] != "" { - name = props["name"] - } else { - name = link - } - - name += tail - image := false - switch ext := filepath.Ext(link); ext { - // fast path: empty string, ignore - case "": - break - case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg": - image = true - } - - childNode := &html.Node{} - linkNode := &html.Node{ - FirstChild: childNode, - LastChild: childNode, - Type: html.ElementNode, - Data: "a", - DataAtom: atom.A, - } - childNode.Parent = linkNode - absoluteLink := isLinkStr(link) - if !absoluteLink { - if image { - link = strings.ReplaceAll(link, " ", "+") + var name, link string + if props["link"] != "" { + link = props["link"] + } else if props["name"] != "" { + link = props["name"] + } + if props["title"] != "" { + name = props["title"] + } else if props["name"] != "" { + name = props["name"] } else { - link = strings.ReplaceAll(link, " ", "-") + name = link + } + + name += tail + image := false + switch ext := filepath.Ext(link); ext { + // fast path: empty string, ignore + case "": + // leave image as false + case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg": + image = true } - if !strings.Contains(link, "/") { - link = url.PathEscape(link) + + childNode := &html.Node{} + linkNode := &html.Node{ + FirstChild: childNode, + LastChild: childNode, + Type: html.ElementNode, + Data: "a", + DataAtom: atom.A, } - } - urlPrefix := ctx.URLPrefix - if image { + childNode.Parent = linkNode + absoluteLink := isLinkStr(link) if !absoluteLink { - if IsSameDomain(urlPrefix) { - urlPrefix = strings.Replace(urlPrefix, "/src/", "/raw/", 1) + if image { + link = strings.ReplaceAll(link, " ", "+") + } else { + link = strings.ReplaceAll(link, " ", "-") } - if ctx.IsWiki { - link = util.URLJoin("wiki", "raw", link) + if !strings.Contains(link, "/") { + link = url.PathEscape(link) } - link = util.URLJoin(urlPrefix, link) - } - title := props["title"] - if title == "" { - title = props["alt"] - } - if title == "" { - title = path.Base(name) - } - alt := props["alt"] - if alt == "" { - alt = name } + urlPrefix := ctx.URLPrefix + if image { + if !absoluteLink { + if IsSameDomain(urlPrefix) { + urlPrefix = strings.Replace(urlPrefix, "/src/", "/raw/", 1) + } + if ctx.IsWiki { + link = util.URLJoin("wiki", "raw", link) + } + link = util.URLJoin(urlPrefix, link) + } + title := props["title"] + if title == "" { + title = props["alt"] + } + if title == "" { + title = path.Base(name) + } + alt := props["alt"] + if alt == "" { + alt = name + } - // make the childNode an image - if we can, we also place the alt - childNode.Type = html.ElementNode - childNode.Data = "img" - childNode.DataAtom = atom.Img - childNode.Attr = []html.Attribute{ - {Key: "src", 
Val: link}, - {Key: "title", Val: title}, - {Key: "alt", Val: alt}, - } - if alt == "" { - childNode.Attr = childNode.Attr[:2] - } - } else { - if !absoluteLink { - if ctx.IsWiki { - link = util.URLJoin("wiki", link) + // make the childNode an image - if we can, we also place the alt + childNode.Type = html.ElementNode + childNode.Data = "img" + childNode.DataAtom = atom.Img + childNode.Attr = []html.Attribute{ + {Key: "src", Val: link}, + {Key: "title", Val: title}, + {Key: "alt", Val: alt}, + } + if alt == "" { + childNode.Attr = childNode.Attr[:2] + } + } else { + if !absoluteLink { + if ctx.IsWiki { + link = util.URLJoin("wiki", link) + } + link = util.URLJoin(urlPrefix, link) } - link = util.URLJoin(urlPrefix, link) + childNode.Type = html.TextNode + childNode.Data = name } - childNode.Type = html.TextNode - childNode.Data = name - } - if noLink { - linkNode = childNode - } else { - linkNode.Attr = []html.Attribute{{Key: "href", Val: link}} + if noLink { + linkNode = childNode + } else { + linkNode.Attr = []html.Attribute{{Key: "href", Val: link}} + } + replaceContent(node, m[0], m[1], linkNode) + node = node.NextSibling.NextSibling } - replaceContent(node, m[0], m[1], linkNode) } func fullIssuePatternProcessor(ctx *RenderContext, node *html.Node) { if ctx.Metas == nil { return } - m := getIssueFullPattern().FindStringSubmatchIndex(node.Data) - if m == nil { - return - } - link := node.Data[m[0]:m[1]] - id := "#" + node.Data[m[2]:m[3]] - - // extract repo and org name from matched link like - // http://localhost:3000/gituser/myrepo/issues/1 - linkParts := strings.Split(path.Clean(link), "/") - matchOrg := linkParts[len(linkParts)-4] - matchRepo := linkParts[len(linkParts)-3] - - if matchOrg == ctx.Metas["user"] && matchRepo == ctx.Metas["repo"] { - // TODO if m[4]:m[5] is not nil, then link is to a comment, - // and we should indicate that in the text somehow - replaceContent(node, m[0], m[1], createLink(link, id, "ref-issue")) - - } else { - orgRepoID := matchOrg + "/" + matchRepo + id - replaceContent(node, m[0], m[1], createLink(link, orgRepoID, "ref-issue")) + + next := node.NextSibling + for node != nil && node != next { + m := getIssueFullPattern().FindStringSubmatchIndex(node.Data) + if m == nil { + return + } + link := node.Data[m[0]:m[1]] + id := "#" + node.Data[m[2]:m[3]] + + // extract repo and org name from matched link like + // http://localhost:3000/gituser/myrepo/issues/1 + linkParts := strings.Split(path.Clean(link), "/") + matchOrg := linkParts[len(linkParts)-4] + matchRepo := linkParts[len(linkParts)-3] + + if matchOrg == ctx.Metas["user"] && matchRepo == ctx.Metas["repo"] { + // TODO if m[4]:m[5] is not nil, then link is to a comment, + // and we should indicate that in the text somehow + replaceContent(node, m[0], m[1], createLink(link, id, "ref-issue")) + } else { + orgRepoID := matchOrg + "/" + matchRepo + id + replaceContent(node, m[0], m[1], createLink(link, orgRepoID, "ref-issue")) + } + node = node.NextSibling.NextSibling } } @@ -787,70 +798,74 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) { if ctx.Metas == nil { return } - var ( found bool ref *references.RenderizableReference ) - _, exttrack := ctx.Metas["format"] - alphanum := ctx.Metas["style"] == IssueNameStyleAlphanumeric - - // Repos with external issue trackers might still need to reference local PRs - // We need to concern with the first one that shows up in the text, whichever it is - found, ref = references.FindRenderizableReferenceNumeric(node.Data, exttrack && alphanum) 
- if exttrack && alphanum { - if found2, ref2 := references.FindRenderizableReferenceAlphanumeric(node.Data); found2 { - if !found || ref2.RefLocation.Start < ref.RefLocation.Start { - found = true - ref = ref2 + next := node.NextSibling + for node != nil && node != next { + _, exttrack := ctx.Metas["format"] + alphanum := ctx.Metas["style"] == IssueNameStyleAlphanumeric + + // Repos with external issue trackers might still need to reference local PRs + // We need to concern with the first one that shows up in the text, whichever it is + found, ref = references.FindRenderizableReferenceNumeric(node.Data, exttrack && alphanum) + if exttrack && alphanum { + if found2, ref2 := references.FindRenderizableReferenceAlphanumeric(node.Data); found2 { + if !found || ref2.RefLocation.Start < ref.RefLocation.Start { + found = true + ref = ref2 + } } } - } - if !found { - return - } - - var link *html.Node - reftext := node.Data[ref.RefLocation.Start:ref.RefLocation.End] - if exttrack && !ref.IsPull { - ctx.Metas["index"] = ref.Issue - link = createLink(com.Expand(ctx.Metas["format"], ctx.Metas), reftext, "ref-issue") - } else { - // Path determines the type of link that will be rendered. It's unknown at this point whether - // the linked item is actually a PR or an issue. Luckily it's of no real consequence because - // Gitea will redirect on click as appropriate. - path := "issues" - if ref.IsPull { - path = "pulls" + if !found { + return } - if ref.Owner == "" { - link = createLink(util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], path, ref.Issue), reftext, "ref-issue") + + var link *html.Node + reftext := node.Data[ref.RefLocation.Start:ref.RefLocation.End] + if exttrack && !ref.IsPull { + ctx.Metas["index"] = ref.Issue + link = createLink(com.Expand(ctx.Metas["format"], ctx.Metas), reftext, "ref-issue") } else { - link = createLink(util.URLJoin(setting.AppURL, ref.Owner, ref.Name, path, ref.Issue), reftext, "ref-issue") + // Path determines the type of link that will be rendered. It's unknown at this point whether + // the linked item is actually a PR or an issue. Luckily it's of no real consequence because + // Gitea will redirect on click as appropriate. 
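// Worked example (assumed values): with setting.AppURL "https://try.gitea.io/",
// ctx.Metas["user"] "owner", ctx.Metas["repo"] "repo" and ref.Issue "123", the
// branch below builds "https://try.gitea.io/owner/repo/issues/123" (path is
// "pulls" only when ref.IsPull); as the comment above notes, Gitea redirects on
// click if the linked item turns out to be the other kind.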
+ path := "issues" + if ref.IsPull { + path = "pulls" + } + if ref.Owner == "" { + link = createLink(util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], path, ref.Issue), reftext, "ref-issue") + } else { + link = createLink(util.URLJoin(setting.AppURL, ref.Owner, ref.Name, path, ref.Issue), reftext, "ref-issue") + } } - } - if ref.Action == references.XRefActionNone { - replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link) - return - } + if ref.Action == references.XRefActionNone { + replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link) + node = node.NextSibling.NextSibling + continue + } - // Decorate action keywords if actionable - var keyword *html.Node - if references.IsXrefActionable(ref, exttrack, alphanum) { - keyword = createKeyword(node.Data[ref.ActionLocation.Start:ref.ActionLocation.End]) - } else { - keyword = &html.Node{ + // Decorate action keywords if actionable + var keyword *html.Node + if references.IsXrefActionable(ref, exttrack, alphanum) { + keyword = createKeyword(node.Data[ref.ActionLocation.Start:ref.ActionLocation.End]) + } else { + keyword = &html.Node{ + Type: html.TextNode, + Data: node.Data[ref.ActionLocation.Start:ref.ActionLocation.End], + } + } + spaces := &html.Node{ Type: html.TextNode, - Data: node.Data[ref.ActionLocation.Start:ref.ActionLocation.End], + Data: node.Data[ref.ActionLocation.End:ref.RefLocation.Start], } + replaceContentList(node, ref.ActionLocation.Start, ref.RefLocation.End, []*html.Node{keyword, spaces, link}) + node = node.NextSibling.NextSibling.NextSibling.NextSibling } - spaces := &html.Node{ - Type: html.TextNode, - Data: node.Data[ref.ActionLocation.End:ref.RefLocation.Start], - } - replaceContentList(node, ref.ActionLocation.Start, ref.RefLocation.End, []*html.Node{keyword, spaces, link}) } // fullSha1PatternProcessor renders SHA containing URLs @@ -858,86 +873,111 @@ func fullSha1PatternProcessor(ctx *RenderContext, node *html.Node) { if ctx.Metas == nil { return } - m := anySHA1Pattern.FindStringSubmatchIndex(node.Data) - if m == nil { - return - } - urlFull := node.Data[m[0]:m[1]] - text := base.ShortSha(node.Data[m[2]:m[3]]) + next := node.NextSibling + for node != nil && node != next { + m := anySHA1Pattern.FindStringSubmatchIndex(node.Data) + if m == nil { + return + } - // 3rd capture group matches a optional path - subpath := "" - if m[5] > 0 { - subpath = node.Data[m[4]:m[5]] - } + urlFull := node.Data[m[0]:m[1]] + text := base.ShortSha(node.Data[m[2]:m[3]]) - // 4th capture group matches a optional url hash - hash := "" - if m[7] > 0 { - hash = node.Data[m[6]:m[7]][1:] - } + // 3rd capture group matches a optional path + subpath := "" + if m[5] > 0 { + subpath = node.Data[m[4]:m[5]] + } - start := m[0] - end := m[1] + // 4th capture group matches a optional url hash + hash := "" + if m[7] > 0 { + hash = node.Data[m[6]:m[7]][1:] + } - // If url ends in '.', it's very likely that it is not part of the - // actual url but used to finish a sentence. - if strings.HasSuffix(urlFull, ".") { - end-- - urlFull = urlFull[:len(urlFull)-1] - if hash != "" { - hash = hash[:len(hash)-1] - } else if subpath != "" { - subpath = subpath[:len(subpath)-1] + start := m[0] + end := m[1] + + // If url ends in '.', it's very likely that it is not part of the + // actual url but used to finish a sentence. 
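// Worked example (hypothetical URL): for the text
// "Fixed in https://try.gitea.io/owner/repo/commit/d8a994ef243349f321568f9e36d5c3f444b99cae."
// the trailing '.' is stripped from urlFull (and from hash/subpath when they are
// present) before the commit link is created, so the sentence period does not
// become part of the link.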
+ if strings.HasSuffix(urlFull, ".") { + end-- + urlFull = urlFull[:len(urlFull)-1] + if hash != "" { + hash = hash[:len(hash)-1] + } else if subpath != "" { + subpath = subpath[:len(subpath)-1] + } } - } - if subpath != "" { - text += subpath - } + if subpath != "" { + text += subpath + } - if hash != "" { - text += " (" + hash + ")" - } + if hash != "" { + text += " (" + hash + ")" + } - replaceContent(node, start, end, createCodeLink(urlFull, text, "commit")) + replaceContent(node, start, end, createCodeLink(urlFull, text, "commit")) + node = node.NextSibling.NextSibling + } } // emojiShortCodeProcessor for rendering text like :smile: into emoji func emojiShortCodeProcessor(ctx *RenderContext, node *html.Node) { - m := EmojiShortCodeRegex.FindStringSubmatchIndex(node.Data) - if m == nil { - return - } - - alias := node.Data[m[0]:m[1]] - alias = strings.ReplaceAll(alias, ":", "") - converted := emoji.FromAlias(alias) - if converted == nil { - // check if this is a custom reaction - s := strings.Join(setting.UI.Reactions, " ") + "gitea" - if strings.Contains(s, alias) { - replaceContent(node, m[0], m[1], createCustomEmoji(alias, "emoji")) + start := 0 + next := node.NextSibling + for node != nil && node != next && start < len(node.Data) { + m := EmojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:]) + if m == nil { return } - return - } + m[0] += start + m[1] += start + + start = m[1] + + alias := node.Data[m[0]:m[1]] + alias = strings.ReplaceAll(alias, ":", "") + converted := emoji.FromAlias(alias) + if converted == nil { + // check if this is a custom reaction + if _, exist := setting.UI.CustomEmojisMap[alias]; exist { + replaceContent(node, m[0], m[1], createCustomEmoji(alias)) + node = node.NextSibling.NextSibling + start = 0 + continue + } + continue + } - replaceContent(node, m[0], m[1], createEmoji(converted.Emoji, "emoji", converted.Description)) + replaceContent(node, m[0], m[1], createEmoji(converted.Emoji, "emoji", converted.Description)) + node = node.NextSibling.NextSibling + start = 0 + } } // emoji processor to match emoji and add emoji class func emojiProcessor(ctx *RenderContext, node *html.Node) { - m := emoji.FindEmojiSubmatchIndex(node.Data) - if m == nil { - return - } - - codepoint := node.Data[m[0]:m[1]] - val := emoji.FromCode(codepoint) - if val != nil { - replaceContent(node, m[0], m[1], createEmoji(codepoint, "emoji", val.Description)) + start := 0 + next := node.NextSibling + for node != nil && node != next && start < len(node.Data) { + m := emoji.FindEmojiSubmatchIndex(node.Data[start:]) + if m == nil { + return + } + m[0] += start + m[1] += start + + codepoint := node.Data[m[0]:m[1]] + start = m[1] + val := emoji.FromCode(codepoint) + if val != nil { + replaceContent(node, m[0], m[1], createEmoji(codepoint, "emoji", val.Description)) + node = node.NextSibling.NextSibling + start = 0 + } } } @@ -947,49 +987,91 @@ func sha1CurrentPatternProcessor(ctx *RenderContext, node *html.Node) { if ctx.Metas == nil || ctx.Metas["user"] == "" || ctx.Metas["repo"] == "" || ctx.Metas["repoPath"] == "" { return } - m := sha1CurrentPattern.FindStringSubmatchIndex(node.Data) - if m == nil { - return + + start := 0 + next := node.NextSibling + if ctx.ShaExistCache == nil { + ctx.ShaExistCache = make(map[string]bool) } - hash := node.Data[m[2]:m[3]] - // The regex does not lie, it matches the hash pattern. - // However, a regex cannot know if a hash actually exists or not. 
- // We could assume that a SHA1 hash should probably contain alphas AND numerics - // but that is not always the case. - // Although unlikely, deadbeef and 1234567 are valid short forms of SHA1 hash - // as used by git and github for linking and thus we have to do similar. - // Because of this, we check to make sure that a matched hash is actually - // a commit in the repository before making it a link. - if _, err := git.NewCommand("rev-parse", "--verify", hash).RunInDirBytes(ctx.Metas["repoPath"]); err != nil { - if !strings.Contains(err.Error(), "fatal: Needed a single revision") { - log.Debug("sha1CurrentPatternProcessor git rev-parse: %v", err) + for node != nil && node != next && start < len(node.Data) { + m := sha1CurrentPattern.FindStringSubmatchIndex(node.Data[start:]) + if m == nil { + return + } + m[2] += start + m[3] += start + + hash := node.Data[m[2]:m[3]] + // The regex does not lie, it matches the hash pattern. + // However, a regex cannot know if a hash actually exists or not. + // We could assume that a SHA1 hash should probably contain alphas AND numerics + // but that is not always the case. + // Although unlikely, deadbeef and 1234567 are valid short forms of SHA1 hash + // as used by git and github for linking and thus we have to do similar. + // Because of this, we check to make sure that a matched hash is actually + // a commit in the repository before making it a link. + + // check cache first + exist, inCache := ctx.ShaExistCache[hash] + if !inCache { + if ctx.GitRepo == nil { + var err error + ctx.GitRepo, err = git.OpenRepository(ctx.Metas["repoPath"]) + if err != nil { + log.Error("unable to open repository: %s Error: %v", ctx.Metas["repoPath"], err) + return + } + ctx.AddCancel(func() { + ctx.GitRepo.Close() + ctx.GitRepo = nil + }) + } + + exist = ctx.GitRepo.IsObjectExist(hash) + ctx.ShaExistCache[hash] = exist + } + + if !exist { + start = m[3] + continue } - return - } - replaceContent(node, m[2], m[3], - createCodeLink(util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], "commit", hash), base.ShortSha(hash), "commit")) + replaceContent(node, m[2], m[3], + createCodeLink(util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], "commit", hash), base.ShortSha(hash), "commit")) + start = 0 + node = node.NextSibling.NextSibling + } } // emailAddressProcessor replaces raw email addresses with a mailto: link. func emailAddressProcessor(ctx *RenderContext, node *html.Node) { - m := emailRegex.FindStringSubmatchIndex(node.Data) - if m == nil { - return + next := node.NextSibling + for node != nil && node != next { + m := emailRegex.FindStringSubmatchIndex(node.Data) + if m == nil { + return + } + + mail := node.Data[m[2]:m[3]] + replaceContent(node, m[2], m[3], createLink("mailto:"+mail, mail, "mailto")) + node = node.NextSibling.NextSibling } - mail := node.Data[m[2]:m[3]] - replaceContent(node, m[2], m[3], createLink("mailto:"+mail, mail, "mailto")) } // linkProcessor creates links for any HTTP or HTTPS URL not captured by // markdown. 
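// A condensed sketch of the loop these processors now share. replaceContent
// (defined earlier in this file) rewrites the current text node to the text
// before the match, then inserts the new element and a text node holding the
// remaining text as siblings, so the processor advances two siblings and keeps
// scanning until it reaches the sibling that originally followed the text node.
// somePattern and newNode stand in for the processor-specific pieces.
next := node.NextSibling
for node != nil && node != next {
	m := somePattern.FindStringSubmatchIndex(node.Data)
	if m == nil {
		return
	}
	replaceContent(node, m[0], m[1], newNode)
	node = node.NextSibling.NextSibling // skip the element just inserted
}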
func linkProcessor(ctx *RenderContext, node *html.Node) { - m := common.LinkRegex.FindStringIndex(node.Data) - if m == nil { - return + next := node.NextSibling + for node != nil && node != next { + m := common.LinkRegex.FindStringIndex(node.Data) + if m == nil { + return + } + + uri := node.Data[m[0]:m[1]] + replaceContent(node, m[0], m[1], createLink(uri, uri, "link")) + node = node.NextSibling.NextSibling } - uri := node.Data[m[0]:m[1]] - replaceContent(node, m[0], m[1], createLink(uri, uri, "link")) } func genDefaultLinkProcessor(defaultLink string) processor { @@ -1013,12 +1095,17 @@ func genDefaultLinkProcessor(defaultLink string) processor { // descriptionLinkProcessor creates links for DescriptionHTML func descriptionLinkProcessor(ctx *RenderContext, node *html.Node) { - m := common.LinkRegex.FindStringIndex(node.Data) - if m == nil { - return + next := node.NextSibling + for node != nil && node != next { + m := common.LinkRegex.FindStringIndex(node.Data) + if m == nil { + return + } + + uri := node.Data[m[0]:m[1]] + replaceContent(node, m[0], m[1], createDescriptionLink(uri, uri)) + node = node.NextSibling.NextSibling } - uri := node.Data[m[0]:m[1]] - replaceContent(node, m[0], m[1], createDescriptionLink(uri, uri)) } func createDescriptionLink(href, content string) *html.Node { diff --git a/modules/markup/html_test.go b/modules/markup/html_test.go index 3425c3d3a887..dff9102beddb 100644 --- a/modules/markup/html_test.go +++ b/modules/markup/html_test.go @@ -112,7 +112,7 @@ func TestRender_links(t *testing.T) { defaultCustom := setting.Markdown.CustomURLSchemes setting.Markdown.CustomURLSchemes = []string{"ftp", "magnet"} - ReplaceSanitizer() + InitializeSanitizer() CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes) test( @@ -138,13 +138,13 @@ func TestRender_links(t *testing.T) { `

http://www.example.com/wpstyle/?p=364

`) test( "https://www.example.com/foo/?bar=baz&inga=42&quux", - `

https://www.example.com/foo/?bar=baz&inga=42&quux

`) + `

https://www.example.com/foo/?bar=baz&inga=42&quux

`) test( "http://142.42.1.1/", `

http://142.42.1.1/

`) test( "https://github.com/go-gitea/gitea/?p=aaa/bbb.html#ccc-ddd", - `

https://github.com/go-gitea/gitea/?p=aaa/bbb.html#ccc-ddd

`) + `

https://github.com/go-gitea/gitea/?p=aaa/bbb.html#ccc-ddd

`) test( "https://en.wikipedia.org/wiki/URL_(disambiguation)", `

https://en.wikipedia.org/wiki/URL_(disambiguation)

`) @@ -162,7 +162,7 @@ func TestRender_links(t *testing.T) { `

ftp://gitea.com/file.txt

`) test( "magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download", - `

magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download

`) + `

magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download

`) // Test that should *not* be turned into URL test( @@ -192,7 +192,7 @@ func TestRender_links(t *testing.T) { // Restore previous settings setting.Markdown.CustomURLSchemes = defaultCustom - ReplaceSanitizer() + InitializeSanitizer() CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes) } @@ -283,8 +283,19 @@ func TestRender_emoji(t *testing.T) { //Text that should be turned into or recognized as emoji test( ":gitea:", - `

:gitea:

`) - + `

:gitea:

`) + test( + ":custom-emoji:", + `

:custom-emoji:

`) + setting.UI.CustomEmojisMap["custom-emoji"] = ":custom-emoji:" + test( + ":custom-emoji:", + `

:custom-emoji:

`) + test( + "这是字符:1::+1: some🐊 \U0001f44d:custom-emoji: :gitea:", + `

这是字符:1:👍 some🐊 `+ + `👍:custom-emoji: `+ + `:gitea:

`) test( "Some text with 😄 in the middle", `

Some text with 😄 in the middle

`) @@ -414,6 +425,41 @@ func TestRender_ShortLinks(t *testing.T) { `

[[foobar]]

`) } +func TestRender_RelativeImages(t *testing.T) { + setting.AppURL = AppURL + setting.AppSubURL = AppSubURL + tree := util.URLJoin(AppSubURL, "src", "master") + + test := func(input, expected, expectedWiki string) { + buffer, err := markdown.RenderString(&RenderContext{ + URLPrefix: tree, + Metas: localMetas, + }, input) + assert.NoError(t, err) + assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer)) + buffer, err = markdown.RenderString(&RenderContext{ + URLPrefix: setting.AppSubURL, + Metas: localMetas, + IsWiki: true, + }, input) + assert.NoError(t, err) + assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(buffer)) + } + + rawwiki := util.URLJoin(AppSubURL, "wiki", "raw") + mediatree := util.URLJoin(AppSubURL, "media", "master") + + test( + ``, + ``, + ``) + + test( + ``, + ``, + ``) +} + func Test_ParseClusterFuzz(t *testing.T) { setting.AppURL = AppURL setting.AppSubURL = AppSubURL @@ -444,3 +490,39 @@ func Test_ParseClusterFuzz(t *testing.T) { assert.NoError(t, err) assert.NotContains(t, res.String(), "` + + var res strings.Builder + err := PostProcess(&RenderContext{ + URLPrefix: "https://example.com", + Metas: localMetas, + }, strings.NewReader(data), &res) + assert.NoError(t, err) + assert.Equal(t, data, res.String()) +} + +func BenchmarkEmojiPostprocess(b *testing.B) { + data := "🥰 " + for len(data) < 1<<16 { + data += data + } + b.ResetTimer() + for i := 0; i < b.N; i++ { + var res strings.Builder + err := PostProcess(&RenderContext{ + URLPrefix: "https://example.com", + Metas: localMetas, + }, strings.NewReader(data), &res) + assert.NoError(b, err) + } +} diff --git a/modules/markup/markdown/ast.go b/modules/markup/markdown/ast.go index d735ff5ebd84..5191d94cdd85 100644 --- a/modules/markup/markdown/ast.go +++ b/modules/markup/markdown/ast.go @@ -74,7 +74,7 @@ func IsSummary(node ast.Node) bool { return ok } -// TaskCheckBoxListItem is a block that repressents a list item of a markdown block with a checkbox +// TaskCheckBoxListItem is a block that represents a list item of a markdown block with a checkbox type TaskCheckBoxListItem struct { *ast.ListItem IsChecked bool diff --git a/modules/markup/markdown/goldmark.go b/modules/markup/markdown/goldmark.go index ad77177db439..f1c259f82429 100644 --- a/modules/markup/markdown/goldmark.go +++ b/modules/markup/markdown/goldmark.go @@ -384,18 +384,19 @@ func (r *HTMLRenderer) renderTaskCheckBoxListItem(w util.BufWriter, source []byt } else { _, _ = w.WriteString("
  • ") } - end := ">" - if r.XHTML { - end = " />" + _, _ = w.WriteString(` 0 { + segment := segments.At(0) + _, _ = w.WriteString(fmt.Sprintf(` data-source-position="%d"`, segment.Start)) } - var err error if n.IsChecked { - _, err = w.WriteString(``) + } else { + _ = w.WriteByte('>') } fc := n.FirstChild() if fc != nil { diff --git a/modules/markup/markdown/markdown.go b/modules/markup/markdown/markdown.go index 87fae2a23b2f..cac2a180faee 100644 --- a/modules/markup/markdown/markdown.go +++ b/modules/markup/markdown/markdown.go @@ -199,7 +199,7 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer) } _ = lw.Close() }() - buf := markup.SanitizeReader(rd) + buf := markup.SanitizeReader(rd, "") _, err := io.Copy(output, buf) return err } @@ -215,7 +215,7 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error if log.IsDebug() { log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2))) } - ret := markup.SanitizeReader(input) + ret := markup.SanitizeReader(input, "") _, err = io.Copy(output, ret) if err != nil { log.Error("SanitizeReader failed: %v", err) @@ -249,6 +249,11 @@ func (Renderer) Extensions() []string { return setting.Markdown.FileExtensions } +// SanitizerRules implements markup.Renderer +func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule { + return []setting.MarkupSanitizerRule{} +} + // Render implements markup.Renderer func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { return render(ctx, input, output) diff --git a/modules/markup/markdown/markdown_test.go b/modules/markup/markdown/markdown_test.go index 5997dbccdcf9..76c6d28d07b7 100644 --- a/modules/markup/markdown/markdown_test.go +++ b/modules/markup/markdown/markdown_test.go @@ -166,9 +166,9 @@ func testAnswers(baseURLContent, baseURLImages string) []string {

    (from https://www.markdownguide.org/extended-syntax/)

    Checkboxes

-unchecked
-checked
-still unchecked
+unchecked
+checked
+still unchecked

    Definition list

    @@ -269,6 +269,9 @@ Here is a simple footnote,[^1] and here is a longer one.[^bignote] } func TestTotal_RenderWiki(t *testing.T) { + setting.AppURL = AppURL + setting.AppSubURL = AppSubURL + answers := testAnswers(util.URLJoin(AppSubURL, "wiki/"), util.URLJoin(AppSubURL, "wiki", "raw/")) for i := 0; i < len(sameCases); i++ { @@ -305,6 +308,9 @@ func TestTotal_RenderWiki(t *testing.T) { } func TestTotal_RenderString(t *testing.T) { + setting.AppURL = AppURL + setting.AppSubURL = AppSubURL + answers := testAnswers(util.URLJoin(AppSubURL, "src", "master/"), util.URLJoin(AppSubURL, "raw", "master/")) for i := 0; i < len(sameCases); i++ { diff --git a/modules/markup/markdown/meta_test.go b/modules/markup/markdown/meta_test.go index a585f0382f81..f525777a54c1 100644 --- a/modules/markup/markdown/meta_test.go +++ b/modules/markup/markdown/meta_test.go @@ -18,7 +18,7 @@ func TestExtractMetadata(t *testing.T) { var meta structs.IssueTemplate body, err := ExtractMetadata(fmt.Sprintf("%s\n%s\n%s\n%s", sepTest, frontTest, sepTest, bodyTest), &meta) assert.NoError(t, err) - assert.Equal(t, body, bodyTest) + assert.Equal(t, bodyTest, body) assert.Equal(t, metaTest, meta) assert.True(t, meta.Valid()) }) @@ -39,7 +39,7 @@ func TestExtractMetadata(t *testing.T) { var meta structs.IssueTemplate body, err := ExtractMetadata(fmt.Sprintf("%s\n%s\n%s", sepTest, frontTest, sepTest), &meta) assert.NoError(t, err) - assert.Equal(t, body, "") + assert.Equal(t, "", body) assert.Equal(t, metaTest, meta) assert.True(t, meta.Valid()) }) diff --git a/modules/markup/mdstripper/mdstripper_test.go b/modules/markup/mdstripper/mdstripper_test.go index 9efcc359496c..13cea0ff72a1 100644 --- a/modules/markup/mdstripper/mdstripper_test.go +++ b/modules/markup/mdstripper/mdstripper_test.go @@ -25,7 +25,7 @@ func TestMarkdownStripper(t *testing.T) { This is [one](link) to paradise. This **is emphasized**. -This: should coallesce. +This: should coalesce. ` + "```" + ` This is a code block. @@ -44,7 +44,7 @@ A HIDDEN ` + "`" + `GHOST` + "`" + ` IN THIS LINE. "This", "is emphasized", ".", - "This: should coallesce.", + "This: should coalesce.", "Bullet 1", "Bullet 2", "A HIDDEN", diff --git a/modules/markup/orgmode/orgmode.go b/modules/markup/orgmode/orgmode.go index 96e67f90cfa2..7e9f1f45c5a7 100644 --- a/modules/markup/orgmode/orgmode.go +++ b/modules/markup/orgmode/orgmode.go @@ -11,9 +11,13 @@ import ( "io" "strings" + "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" + "github.com/alecthomas/chroma" + "github.com/alecthomas/chroma/lexers" "github.com/niklasfasching/go-org/org" ) @@ -38,9 +42,55 @@ func (Renderer) Extensions() []string { return []string{".org"} } +// SanitizerRules implements markup.Renderer +func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule { + return []setting.MarkupSanitizerRule{} +} + // Render renders orgmode rawbytes to HTML func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { htmlWriter := org.NewHTMLWriter() + htmlWriter.HighlightCodeBlock = func(source, lang string, inline bool) string { + var w strings.Builder + if _, err := w.WriteString(`
    `); err != nil {
    +			return ""
    +		}
    +
    +		lexer := lexers.Get(lang)
    +		if lexer == nil && lang == "" {
    +			lexer = lexers.Analyse(source)
    +			if lexer == nil {
    +				lexer = lexers.Fallback
    +			}
    +			lang = strings.ToLower(lexer.Config().Name)
    +		}
    +
    +		if lexer == nil {
    +			// include language-x class as part of commonmark spec
    +			if _, err := w.WriteString(``); err != nil {
    +				return ""
    +			}
    +			if _, err := w.WriteString(html.EscapeString(source)); err != nil {
    +				return ""
    +			}
    +		} else {
    +			// include language-x class as part of commonmark spec
    +			if _, err := w.WriteString(``); err != nil {
    +				return ""
    +			}
    +			lexer = chroma.Coalesce(lexer)
    +
    +			if _, err := w.WriteString(highlight.Code(lexer.Config().Filenames[0], source)); err != nil {
    +				return ""
    +			}
    +		}
    +
    +		if _, err := w.WriteString("
    "); err != nil { + return "" + } + + return w.String() + } w := &Writer{ HTMLWriter: htmlWriter, diff --git a/modules/markup/renderer.go b/modules/markup/renderer.go index 7cc81574ba0e..04619caee335 100644 --- a/modules/markup/renderer.go +++ b/modules/markup/renderer.go @@ -13,6 +13,7 @@ import ( "strings" "sync" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" ) @@ -35,19 +36,52 @@ func Init() { // RenderContext represents a render context type RenderContext struct { - Ctx context.Context - Filename string - Type string - IsWiki bool - URLPrefix string - Metas map[string]string - DefaultLink string + Ctx context.Context + Filename string + Type string + IsWiki bool + URLPrefix string + Metas map[string]string + DefaultLink string + GitRepo *git.Repository + ShaExistCache map[string]bool + cancelFn func() +} + +// Cancel runs any cleanup functions that have been registered for this Ctx +func (ctx *RenderContext) Cancel() { + if ctx == nil { + return + } + ctx.ShaExistCache = map[string]bool{} + if ctx.cancelFn == nil { + return + } + ctx.cancelFn() +} + +// AddCancel adds the provided fn as a Cleanup for this Ctx +func (ctx *RenderContext) AddCancel(fn func()) { + if ctx == nil { + return + } + oldCancelFn := ctx.cancelFn + if oldCancelFn == nil { + ctx.cancelFn = fn + return + } + ctx.cancelFn = func() { + defer oldCancelFn() + fn() + } } // Renderer defines an interface for rendering markup file to HTML type Renderer interface { Name() string // markup format name Extensions() []string + NeedPostProcess() bool + SanitizerRules() []setting.MarkupSanitizerRule Render(ctx *RenderContext, input io.Reader, output io.Writer) error } @@ -94,7 +128,7 @@ func RenderString(ctx *RenderContext, content string) (string, error) { return buf.String(), nil } -func render(ctx *RenderContext, parser Renderer, input io.Reader, output io.Writer) error { +func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Writer) error { var wg sync.WaitGroup var err error pr, pw := io.Pipe() @@ -111,7 +145,7 @@ func render(ctx *RenderContext, parser Renderer, input io.Reader, output io.Writ wg.Add(1) go func() { - buf := SanitizeReader(pr2) + buf := SanitizeReader(pr2, renderer.Name()) _, err = io.Copy(output, buf) _ = pr2.Close() wg.Done() @@ -119,13 +153,17 @@ func render(ctx *RenderContext, parser Renderer, input io.Reader, output io.Writ wg.Add(1) go func() { - err = PostProcess(ctx, pr, pw2) + if renderer.NeedPostProcess() { + err = PostProcess(ctx, pr, pw2) + } else { + _, err = io.Copy(pw2, pr) + } _ = pr.Close() _ = pw2.Close() wg.Done() }() - if err1 := parser.Render(ctx, input, pw); err1 != nil { + if err1 := renderer.Render(ctx, input, pw); err1 != nil { return err1 } _ = pw.Close() diff --git a/modules/markup/sanitizer.go b/modules/markup/sanitizer.go index 9f336d8330d0..9342d65de581 100644 --- a/modules/markup/sanitizer.go +++ b/modules/markup/sanitizer.go @@ -19,8 +19,9 @@ import ( // Sanitizer is a protection wrapper of *bluemonday.Policy which does not allow // any modification to the underlying policies once it's been created. type Sanitizer struct { - policy *bluemonday.Policy - init sync.Once + defaultPolicy *bluemonday.Policy + rendererPolicies map[string]*bluemonday.Policy + init sync.Once } var sanitizer = &Sanitizer{} @@ -30,50 +31,57 @@ var sanitizer = &Sanitizer{} // entire application lifecycle. 
func NewSanitizer() { sanitizer.init.Do(func() { - ReplaceSanitizer() + InitializeSanitizer() }) } -// ReplaceSanitizer replaces the current sanitizer to account for changes in settings -func ReplaceSanitizer() { - sanitizer.policy = bluemonday.UGCPolicy() +// InitializeSanitizer (re)initializes the current sanitizer to account for changes in settings +func InitializeSanitizer() { + sanitizer.rendererPolicies = map[string]*bluemonday.Policy{} + sanitizer.defaultPolicy = createDefaultPolicy() + + for name, renderer := range renderers { + sanitizerRules := renderer.SanitizerRules() + if len(sanitizerRules) > 0 { + policy := createDefaultPolicy() + addSanitizerRules(policy, sanitizerRules) + sanitizer.rendererPolicies[name] = policy + } + } +} + +func createDefaultPolicy() *bluemonday.Policy { + policy := bluemonday.UGCPolicy() // For Chroma markdown plugin - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre") - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code") // Checkboxes - sanitizer.policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") - sanitizer.policy.AllowAttrs("checked", "disabled").OnElements("input") + policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") + policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input") // Custom URL-Schemes if len(setting.Markdown.CustomURLSchemes) > 0 { - sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...) + policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...) 
} - // Allow keyword markup - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^` + keywordClass + `$`)).OnElements("span") - // Allow classes for anchors - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a") // Allow classes for task lists - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li") // Allow icons - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i") // Allow unlabelled labels - sanitizer.policy.AllowNoAttrs().OnElements("label") + policy.AllowNoAttrs().OnElements("label") // Allow classes for emojis - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img") - - // Allow icons, emojis, and chroma syntax on span - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$`)).OnElements("span") + policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img") - // Allow data tables - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`data-table`)).OnElements("table") - sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`line-num`)).OnElements("th", "td") + // Allow icons, emojis, chroma syntax and keyword markup on span + policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span") // Allow generally safe attributes generalSafeAttrs := []string{"abbr", "accept", "accept-charset", @@ -104,18 +112,29 @@ func ReplaceSanitizer() { "abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "wbr", } - sanitizer.policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) + policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) - sanitizer.policy.AllowAttrs("itemscope", "itemtype").OnElements("div") + policy.AllowAttrs("itemscope", "itemtype").OnElements("div") // FIXME: Need to handle longdesc in img but there is no easy way to do it // Custom keyword markup - for _, rule := range setting.ExternalSanitizerRules { - if rule.Regexp != nil { - sanitizer.policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element) - } else { - sanitizer.policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element) + addSanitizerRules(policy, setting.ExternalSanitizerRules) + + return policy +} + +func addSanitizerRules(policy *bluemonday.Policy, rules []setting.MarkupSanitizerRule) { + for _, rule := range rules { + if rule.AllowDataURIImages { + policy.AllowDataURIImages() + } + if rule.Element != "" { + if rule.Regexp != nil { + policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element) + } else { + policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element) + } } } } @@ -123,21 +142,15 @@ func ReplaceSanitizer() { // Sanitize takes a string that contains a HTML fragment or document and applies policy whitelist. 
func Sanitize(s string) string { NewSanitizer() - return sanitizer.policy.Sanitize(s) + return sanitizer.defaultPolicy.Sanitize(s) } // SanitizeReader sanitizes a Reader -func SanitizeReader(r io.Reader) *bytes.Buffer { +func SanitizeReader(r io.Reader, renderer string) *bytes.Buffer { NewSanitizer() - return sanitizer.policy.SanitizeReader(r) -} - -// SanitizeBytes takes a []byte slice that contains a HTML fragment or document and applies policy whitelist. -func SanitizeBytes(b []byte) []byte { - if len(b) == 0 { - // nothing to sanitize - return b + policy, exist := sanitizer.rendererPolicies[renderer] + if !exist { + policy = sanitizer.defaultPolicy } - NewSanitizer() - return sanitizer.policy.SanitizeBytes(b) + return policy.SanitizeReader(r) } diff --git a/modules/markup/sanitizer_test.go b/modules/markup/sanitizer_test.go index 9e173015d661..64189e143523 100644 --- a/modules/markup/sanitizer_test.go +++ b/modules/markup/sanitizer_test.go @@ -49,7 +49,6 @@ func Test_Sanitizer(t *testing.T) { for i := 0; i < len(testCases); i += 2 { assert.Equal(t, testCases[i+1], Sanitize(testCases[i])) - assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i])))) } } diff --git a/modules/migrations/base/downloader.go b/modules/migrations/base/downloader.go index 919f4b52a05f..71c8f3eaf978 100644 --- a/modules/migrations/base/downloader.go +++ b/modules/migrations/base/downloader.go @@ -11,7 +11,14 @@ import ( "code.gitea.io/gitea/modules/structs" ) -// Downloader downloads the site repo informations +// GetCommentOptions represents an options for get comment +type GetCommentOptions struct { + IssueNumber int64 + Page int + PageSize int +} + +// Downloader downloads the site repo information type Downloader interface { SetContext(context.Context) GetRepoInfo() (*Repository, error) @@ -20,7 +27,8 @@ type Downloader interface { GetReleases() ([]*Release, error) GetLabels() ([]*Label, error) GetIssues(page, perPage int) ([]*Issue, bool, error) - GetComments(issueNumber int64) ([]*Comment, error) + GetComments(opts GetCommentOptions) ([]*Comment, bool, error) + SupportGetRepoComments() bool GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) GetReviews(pullRequestNumber int64) ([]*Review, error) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) diff --git a/modules/migrations/base/label.go b/modules/migrations/base/label.go index 0c86b547f194..5a66e7620f62 100644 --- a/modules/migrations/base/label.go +++ b/modules/migrations/base/label.go @@ -5,7 +5,7 @@ package base -// Label defines a standard label informations +// Label defines a standard label information type Label struct { Name string Color string diff --git a/modules/migrations/base/messenger.go b/modules/migrations/base/messenger.go new file mode 100644 index 000000000000..a92f59ef7fae --- /dev/null +++ b/modules/migrations/base/messenger.go @@ -0,0 +1,11 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
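// A minimal sketch (hypothetical renderer, names assumed) of the Renderer
// interface as extended in this patch: besides Name, Extensions and Render, a
// renderer now states whether its output needs the reference/link
// post-processing pass and which extra sanitizer rules to layer on top of the
// default policy via InitializeSanitizer.
type plainRenderer struct{}

func (plainRenderer) Name() string          { return "plain" }
func (plainRenderer) Extensions() []string  { return []string{".txt"} }
func (plainRenderer) NeedPostProcess() bool { return true }
func (plainRenderer) SanitizerRules() []setting.MarkupSanitizerRule {
	return []setting.MarkupSanitizerRule{} // no renderer-specific rules; the default policy applies
}
func (plainRenderer) Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
	_, err := io.Copy(output, input)
	return err
}

// From a calling package, the renderer name (or "", as the markdown path above
// passes) selects the matching policy, falling back to the default one when no
// renderer-specific rules were registered. rawHTML is an assumed input.
out := markup.SanitizeReader(strings.NewReader(rawHTML), "plain")
def := markup.SanitizeReader(strings.NewReader(rawHTML), "") // default policy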
+ +package base + +// Messenger is a formatting function similar to i18n.Tr +type Messenger func(key string, args ...interface{}) + +// NilMessenger represents an empty formatting function +func NilMessenger(string, ...interface{}) {} diff --git a/modules/migrations/base/null_downloader.go b/modules/migrations/base/null_downloader.go index a93c20339b60..53a536709d1f 100644 --- a/modules/migrations/base/null_downloader.go +++ b/modules/migrations/base/null_downloader.go @@ -51,8 +51,8 @@ func (n NullDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) { } // GetComments returns comments according issueNumber -func (n NullDownloader) GetComments(issueNumber int64) ([]*Comment, error) { - return nil, &ErrNotSupported{Entity: "Comments"} +func (n NullDownloader) GetComments(GetCommentOptions) ([]*Comment, bool, error) { + return nil, false, &ErrNotSupported{Entity: "Comments"} } // GetPullRequests returns pull requests according page and perPage @@ -80,3 +80,8 @@ func (n NullDownloader) FormatCloneURL(opts MigrateOptions, remoteAddr string) ( } return remoteAddr, nil } + +// SupportGetRepoComments return true if it supports get repo comments +func (n NullDownloader) SupportGetRepoComments() bool { + return false +} diff --git a/modules/migrations/base/options.go b/modules/migrations/base/options.go index f1d9f81e575b..b12e1f94aa4a 100644 --- a/modules/migrations/base/options.go +++ b/modules/migrations/base/options.go @@ -11,10 +11,13 @@ import "code.gitea.io/gitea/modules/structs" // this is for internal usage by migrations module and func who interact with it type MigrateOptions struct { // required: true - CloneAddr string `json:"clone_addr" binding:"Required"` - AuthUsername string `json:"auth_username"` - AuthPassword string `json:"auth_password"` - AuthToken string `json:"auth_token"` + CloneAddr string `json:"clone_addr" binding:"Required"` + CloneAddrEncrypted string `json:"clone_addr_encrypted,omitempty"` + AuthUsername string `json:"auth_username"` + AuthPassword string `json:"-"` + AuthPasswordEncrypted string `json:"auth_password_encrypted,omitempty"` + AuthToken string `json:"-"` + AuthTokenEncrypted string `json:"auth_token_encrypted,omitempty"` // required: true UID int `json:"uid" binding:"Required"` // required: true diff --git a/modules/migrations/base/retry_downloader.go b/modules/migrations/base/retry_downloader.go index eeb3cabbc18f..e6c80038f181 100644 --- a/modules/migrations/base/retry_downloader.go +++ b/modules/migrations/base/retry_downloader.go @@ -31,217 +31,167 @@ func NewRetryDownloader(ctx context.Context, downloader Downloader, retryTimes, } } -// SetContext set context -func (d *RetryDownloader) SetContext(ctx context.Context) { - d.ctx = ctx - d.Downloader.SetContext(ctx) -} - -// GetRepoInfo returns a repository information with retry -func (d *RetryDownloader) GetRepoInfo() (*Repository, error) { +func (d *RetryDownloader) retry(work func() error) error { var ( times = d.RetryTimes - repo *Repository err error ) for ; times > 0; times-- { - if repo, err = d.Downloader.GetRepoInfo(); err == nil { - return repo, nil + if err = work(); err == nil { + return nil } if IsErrNotSupported(err) { - return nil, err + return err } select { case <-d.ctx.Done(): - return nil, d.ctx.Err() + return d.ctx.Err() case <-time.After(time.Second * time.Duration(d.RetryDelay)): } } - return nil, err + return err +} + +// SetContext set context +func (d *RetryDownloader) SetContext(ctx context.Context) { + d.ctx = ctx + d.Downloader.SetContext(ctx) +} + +// 
GetRepoInfo returns a repository information with retry +func (d *RetryDownloader) GetRepoInfo() (*Repository, error) { + var ( + repo *Repository + err error + ) + + err = d.retry(func() error { + repo, err = d.Downloader.GetRepoInfo() + return err + }) + + return repo, err } // GetTopics returns a repository's topics with retry func (d *RetryDownloader) GetTopics() ([]string, error) { var ( - times = d.RetryTimes topics []string err error ) - for ; times > 0; times-- { - if topics, err = d.Downloader.GetTopics(); err == nil { - return topics, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + topics, err = d.Downloader.GetTopics() + return err + }) + + return topics, err } // GetMilestones returns a repository's milestones with retry func (d *RetryDownloader) GetMilestones() ([]*Milestone, error) { var ( - times = d.RetryTimes milestones []*Milestone err error ) - for ; times > 0; times-- { - if milestones, err = d.Downloader.GetMilestones(); err == nil { - return milestones, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + milestones, err = d.Downloader.GetMilestones() + return err + }) + + return milestones, err } // GetReleases returns a repository's releases with retry func (d *RetryDownloader) GetReleases() ([]*Release, error) { var ( - times = d.RetryTimes releases []*Release err error ) - for ; times > 0; times-- { - if releases, err = d.Downloader.GetReleases(); err == nil { - return releases, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + releases, err = d.Downloader.GetReleases() + return err + }) + + return releases, err } // GetLabels returns a repository's labels with retry func (d *RetryDownloader) GetLabels() ([]*Label, error) { var ( - times = d.RetryTimes labels []*Label err error ) - for ; times > 0; times-- { - if labels, err = d.Downloader.GetLabels(); err == nil { - return labels, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + labels, err = d.Downloader.GetLabels() + return err + }) + + return labels, err } // GetIssues returns a repository's issues with retry func (d *RetryDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) { var ( - times = d.RetryTimes issues []*Issue isEnd bool err error ) - for ; times > 0; times-- { - if issues, isEnd, err = d.Downloader.GetIssues(page, perPage); err == nil { - return issues, isEnd, nil - } - if IsErrNotSupported(err) { - return nil, false, err - } - select { - case <-d.ctx.Done(): - return nil, false, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, false, err + + err = d.retry(func() error { + issues, isEnd, err = d.Downloader.GetIssues(page, perPage) + return err + }) + + return issues, isEnd, err } // GetComments returns a repository's comments with retry -func (d *RetryDownloader) 
GetComments(issueNumber int64) ([]*Comment, error) { +func (d *RetryDownloader) GetComments(opts GetCommentOptions) ([]*Comment, bool, error) { var ( - times = d.RetryTimes comments []*Comment + isEnd bool err error ) - for ; times > 0; times-- { - if comments, err = d.Downloader.GetComments(issueNumber); err == nil { - return comments, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + comments, isEnd, err = d.Downloader.GetComments(opts) + return err + }) + + return comments, isEnd, err } // GetPullRequests returns a repository's pull requests with retry func (d *RetryDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) { var ( - times = d.RetryTimes prs []*PullRequest err error isEnd bool ) - for ; times > 0; times-- { - if prs, isEnd, err = d.Downloader.GetPullRequests(page, perPage); err == nil { - return prs, isEnd, nil - } - if IsErrNotSupported(err) { - return nil, false, err - } - select { - case <-d.ctx.Done(): - return nil, false, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, false, err + + err = d.retry(func() error { + prs, isEnd, err = d.Downloader.GetPullRequests(page, perPage) + return err + }) + + return prs, isEnd, err } // GetReviews returns pull requests reviews func (d *RetryDownloader) GetReviews(pullRequestNumber int64) ([]*Review, error) { var ( - times = d.RetryTimes reviews []*Review err error ) - for ; times > 0; times-- { - if reviews, err = d.Downloader.GetReviews(pullRequestNumber); err == nil { - return reviews, nil - } - if IsErrNotSupported(err) { - return nil, err - } - select { - case <-d.ctx.Done(): - return nil, d.ctx.Err() - case <-time.After(time.Second * time.Duration(d.RetryDelay)): - } - } - return nil, err + + err = d.retry(func() error { + reviews, err = d.Downloader.GetReviews(pullRequestNumber) + return err + }) + + return reviews, err } diff --git a/modules/migrations/base/uploader.go b/modules/migrations/base/uploader.go index dfcf81d05224..4d0257df37b0 100644 --- a/modules/migrations/base/uploader.go +++ b/modules/migrations/base/uploader.go @@ -5,7 +5,7 @@ package base -// Uploader uploads all the informations of one repository +// Uploader uploads all the information of one repository type Uploader interface { MaxBatchInsertSize(tp string) int CreateRepo(repo *Repository, opts MigrateOptions) error diff --git a/modules/migrations/dump.go b/modules/migrations/dump.go index 297095883be4..6c4cf174d4fc 100644 --- a/modules/migrations/dump.go +++ b/modules/migrations/dump.go @@ -13,6 +13,7 @@ import ( "os" "path/filepath" "strconv" + "strings" "time" "code.gitea.io/gitea/models" @@ -554,7 +555,7 @@ func DumpRepository(ctx context.Context, baseDir, ownerName string, opts base.Mi return err } - if err := migrateRepository(downloader, uploader, opts); err != nil { + if err := migrateRepository(downloader, uploader, opts, nil); err != nil { if err1 := uploader.Rollback(); err1 != nil { log.Error("rollback failed: %v", err1) } @@ -563,8 +564,42 @@ func DumpRepository(ctx context.Context, baseDir, ownerName string, opts base.Mi return nil } +func updateOptionsUnits(opts *base.MigrateOptions, units []string) { + if len(units) == 0 { + opts.Wiki = true + opts.Issues = true + opts.Milestones = true + opts.Labels = true + opts.Releases = true + opts.Comments = true + opts.PullRequests = 
true + opts.ReleaseAssets = true + } else { + for _, unit := range units { + switch strings.ToLower(unit) { + case "wiki": + opts.Wiki = true + case "issues": + opts.Issues = true + case "milestones": + opts.Milestones = true + case "labels": + opts.Labels = true + case "releases": + opts.Releases = true + case "release_assets": + opts.ReleaseAssets = true + case "comments": + opts.Comments = true + case "pull_requests": + opts.PullRequests = true + } + } + } +} + // RestoreRepository restore a repository from the disk directory -func RestoreRepository(ctx context.Context, baseDir string, ownerName, repoName string) error { +func RestoreRepository(ctx context.Context, baseDir string, ownerName, repoName string, units []string) error { doer, err := models.GetAdminUser() if err != nil { return err @@ -580,17 +615,12 @@ func RestoreRepository(ctx context.Context, baseDir string, ownerName, repoName } tp, _ := strconv.Atoi(opts["service_type"]) - if err = migrateRepository(downloader, uploader, base.MigrateOptions{ - Wiki: true, - Issues: true, - Milestones: true, - Labels: true, - Releases: true, - Comments: true, - PullRequests: true, - ReleaseAssets: true, + var migrateOpts = base.MigrateOptions{ GitServiceType: structs.GitServiceType(tp), - }); err != nil { + } + updateOptionsUnits(&migrateOpts, units) + + if err = migrateRepository(downloader, uploader, migrateOpts, nil); err != nil { if err1 := uploader.Rollback(); err1 != nil { log.Error("rollback failed: %v", err1) } diff --git a/modules/migrations/gitea_downloader.go b/modules/migrations/gitea_downloader.go index 40820ae3759c..665466ffeffd 100644 --- a/modules/migrations/gitea_downloader.go +++ b/modules/migrations/gitea_downloader.go @@ -435,37 +435,37 @@ func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, err } // GetComments returns comments according issueNumber -func (g *GiteaDownloader) GetComments(index int64) ([]*base.Comment, error) { +func (g *GiteaDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { var allComments = make([]*base.Comment, 0, g.maxPerPage) // for i := 1; ; i++ { // make sure gitea can shutdown gracefully select { case <-g.ctx.Done(): - return nil, nil + return nil, false, nil default: } - comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, index, gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{ + comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, opts.IssueNumber, gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{ // PageSize: g.maxPerPage, // Page: i, }}) if err != nil { - return nil, fmt.Errorf("error while listing comments for issue #%d. Error: %v", index, err) + return nil, false, fmt.Errorf("error while listing comments for issue #%d. Error: %v", opts.IssueNumber, err) } for _, comment := range comments { reactions, err := g.getCommentReactions(comment.ID) if err != nil { - log.Warn("Unable to load comment reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", index, comment.ID, g.repoOwner, g.repoName, err) + log.Warn("Unable to load comment reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", opts.IssueNumber, comment.ID, g.repoOwner, g.repoName, err) if err2 := models.CreateRepositoryNotice( - fmt.Sprintf("Unable to load reactions during migrating issue #%d for comment %d to %s/%s. 
Error: %v", index, comment.ID, g.repoOwner, g.repoName, err)); err2 != nil { + fmt.Sprintf("Unable to load reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", opts.IssueNumber, comment.ID, g.repoOwner, g.repoName, err)); err2 != nil { log.Error("create repository notice failed: ", err2) } } allComments = append(allComments, &base.Comment{ - IssueIndex: index, + IssueIndex: opts.IssueNumber, PosterID: comment.Poster.ID, PosterName: comment.Poster.UserName, PosterEmail: comment.Poster.Email, @@ -481,7 +481,7 @@ func (g *GiteaDownloader) GetComments(index int64) ([]*base.Comment, error) { // break // } //} - return allComments, nil + return allComments, true, nil } // GetPullRequests returns pull requests according page and perPage diff --git a/modules/migrations/gitea_downloader_test.go b/modules/migrations/gitea_downloader_test.go index c52c1225f401..f62b19897c63 100644 --- a/modules/migrations/gitea_downloader_test.go +++ b/modules/migrations/gitea_downloader_test.go @@ -158,13 +158,13 @@ func TestGiteaDownloadRepo(t *testing.T) { issues, isEnd, err := downloader.GetIssues(1, 50) assert.NoError(t, err) - assert.EqualValues(t, 7, len(issues)) + assert.Len(t, issues, 7) assert.True(t, isEnd) assert.EqualValues(t, "open", issues[0].State) issues, isEnd, err = downloader.GetIssues(3, 2) assert.NoError(t, err) - assert.EqualValues(t, 2, len(issues)) + assert.Len(t, issues, 2) assert.False(t, isEnd) var ( @@ -224,7 +224,9 @@ func TestGiteaDownloadRepo(t *testing.T) { Closed: &closed2, }, issues[1]) - comments, err := downloader.GetComments(4) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: 4, + }) assert.NoError(t, err) assert.Len(t, comments, 2) assert.EqualValues(t, 1598975370, comments[0].Created.Unix()) diff --git a/modules/migrations/gitea_uploader.go b/modules/migrations/gitea_uploader.go index bd6084d6a167..2b18098b7f16 100644 --- a/modules/migrations/gitea_uploader.go +++ b/modules/migrations/gitea_uploader.go @@ -250,14 +250,16 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error { rel.OriginalAuthorID = release.PublisherID } - // calc NumCommits - commit, err := g.gitRepo.GetCommit(rel.TagName) - if err != nil { - return fmt.Errorf("GetCommit: %v", err) - } - rel.NumCommits, err = commit.CommitsCount() - if err != nil { - return fmt.Errorf("CommitsCount: %v", err) + // calc NumCommits if no draft + if !release.Draft { + commit, err := g.gitRepo.GetTagCommit(rel.TagName) + if err != nil { + return fmt.Errorf("GetCommit: %v", err) + } + rel.NumCommits, err = commit.CommitsCount() + if err != nil { + return fmt.Errorf("CommitsCount: %v", err) + } } for _, asset := range release.Assets { @@ -270,22 +272,26 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error { } // download attachment - err = func() error { + err := func() error { // asset.DownloadURL maybe a local file var rc io.ReadCloser - if asset.DownloadURL == nil { + var err error + if asset.DownloadFunc != nil { rc, err = asset.DownloadFunc() if err != nil { return err } - } else { + } else if asset.DownloadURL != nil { rc, err = uri.Open(*asset.DownloadURL) if err != nil { return err } } - defer rc.Close() + if rc == nil { + return nil + } _, err = storage.Attachments.Save(attach.RelativePath(), rc, int64(*asset.Size)) + rc.Close() return err }() if err != nil { @@ -851,6 +857,7 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error { // Rollback when migrating failed, this will rollback all the 
changes. func (g *GiteaLocalUploader) Rollback() error { if g.repo != nil && g.repo.ID > 0 { + g.gitRepo.Close() if err := models.DeleteRepository(g.doer, g.repo.OwnerID, g.repo.ID); err != nil { return err } diff --git a/modules/migrations/gitea_uploader_test.go b/modules/migrations/gitea_uploader_test.go index 3c7def467582..5f36d545846d 100644 --- a/modules/migrations/gitea_uploader_test.go +++ b/modules/migrations/gitea_uploader_test.go @@ -47,7 +47,7 @@ func TestGiteaUploadRepo(t *testing.T) { PullRequests: true, Private: true, Mirror: false, - }) + }, nil) assert.NoError(t, err) repo := models.AssertExistsAndLoadBean(t, &models.Repository{OwnerID: user.ID, Name: repoName}).(*models.Repository) @@ -59,18 +59,18 @@ func TestGiteaUploadRepo(t *testing.T) { State: structs.StateOpen, }) assert.NoError(t, err) - assert.EqualValues(t, 1, len(milestones)) + assert.Len(t, milestones, 1) milestones, err = models.GetMilestones(models.GetMilestonesOption{ RepoID: repo.ID, State: structs.StateClosed, }) assert.NoError(t, err) - assert.EqualValues(t, 0, len(milestones)) + assert.Empty(t, milestones) labels, err := models.GetLabelsByRepoID(repo.ID, "", models.ListOptions{}) assert.NoError(t, err) - assert.EqualValues(t, 11, len(labels)) + assert.Len(t, labels, 11) releases, err := models.GetReleasesByRepoID(repo.ID, models.FindReleasesOptions{ ListOptions: models.ListOptions{ @@ -80,7 +80,7 @@ func TestGiteaUploadRepo(t *testing.T) { IncludeTags: true, }) assert.NoError(t, err) - assert.EqualValues(t, 8, len(releases)) + assert.Len(t, releases, 8) releases, err = models.GetReleasesByRepoID(repo.ID, models.FindReleasesOptions{ ListOptions: models.ListOptions{ @@ -90,7 +90,7 @@ func TestGiteaUploadRepo(t *testing.T) { IncludeTags: false, }) assert.NoError(t, err) - assert.EqualValues(t, 1, len(releases)) + assert.Len(t, releases, 1) issues, err := models.Issues(&models.IssuesOptions{ RepoIDs: []int64{repo.ID}, @@ -98,16 +98,16 @@ func TestGiteaUploadRepo(t *testing.T) { SortType: "oldest", }) assert.NoError(t, err) - assert.EqualValues(t, 14, len(issues)) + assert.Len(t, issues, 14) assert.NoError(t, issues[0].LoadDiscussComments()) - assert.EqualValues(t, 0, len(issues[0].Comments)) + assert.Empty(t, issues[0].Comments) pulls, _, err := models.PullRequests(repo.ID, &models.PullRequestsOptions{ SortType: "oldest", }) assert.NoError(t, err) - assert.EqualValues(t, 34, len(pulls)) + assert.Len(t, pulls, 34) assert.NoError(t, pulls[0].LoadIssue()) assert.NoError(t, pulls[0].Issue.LoadDiscussComments()) - assert.EqualValues(t, 2, len(pulls[0].Issue.Comments)) + assert.Len(t, pulls[0].Issue.Comments, 2) } diff --git a/modules/migrations/github.go b/modules/migrations/github.go index 282e3b478615..7d4c492c24c3 100644 --- a/modules/migrations/github.go +++ b/modules/migrations/github.go @@ -11,6 +11,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "time" @@ -60,7 +61,7 @@ func (f *GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType { return structs.GithubService } -// GithubDownloaderV3 implements a Downloader interface to get repository informations +// GithubDownloaderV3 implements a Downloader interface to get repository information // from github via APIv3 type GithubDownloaderV3 struct { base.NullDownloader @@ -264,34 +265,29 @@ func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) { } func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release { - var ( - name string - desc string - ) - if rel.Body != nil { - desc = *rel.Body - } 
- if rel.Name != nil { - name = *rel.Name - } - - var email string - if rel.Author.Email != nil { - email = *rel.Author.Email - } - r := &base.Release{ TagName: *rel.TagName, TargetCommitish: *rel.TargetCommitish, - Name: name, - Body: desc, Draft: *rel.Draft, Prerelease: *rel.Prerelease, Created: rel.CreatedAt.Time, PublisherID: *rel.Author.ID, PublisherName: *rel.Author.Login, - PublisherEmail: email, - Published: rel.PublishedAt.Time, + } + + if rel.Body != nil { + r.Body = *rel.Body + } + if rel.Name != nil { + r.Name = *rel.Name + } + + if rel.Author.Email != nil { + r.PublisherEmail = *rel.Author.Email + } + + if rel.PublishedAt != nil { + r.Published = rel.PublishedAt.Time } for _, asset := range rel.Assets { @@ -306,18 +302,17 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) Updated: asset.UpdatedAt.Time, DownloadFunc: func() (io.ReadCloser, error) { g.sleep() - asset, redir, err := g.client.Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil) + asset, redirectURL, err := g.client.Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil) if err != nil { return nil, err } - err = g.RefreshRate() - if err != nil { + if err := g.RefreshRate(); err != nil { log.Error("g.client.RateLimits: %s", err) } if asset == nil { - if redir != "" { + if redirectURL != "" { g.sleep() - req, err := http.NewRequestWithContext(g.ctx, "GET", redir, nil) + req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil) if err != nil { return nil, err } @@ -456,8 +451,22 @@ func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, return allIssues, len(issues) < perPage, nil } +// SupportGetRepoComments return true if it supports get repo comments +func (g *GithubDownloaderV3) SupportGetRepoComments() bool { + return true +} + // GetComments returns comments according issueNumber -func (g *GithubDownloaderV3) GetComments(issueNumber int64) ([]*base.Comment, error) { +func (g *GithubDownloaderV3) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { + if opts.IssueNumber > 0 { + comments, err := g.getComments(opts.IssueNumber) + return comments, false, err + } + + return g.GetAllComments(opts.Page, opts.PageSize) +} + +func (g *GithubDownloaderV3) getComments(issueNumber int64) ([]*base.Comment, error) { var ( allComments = make([]*base.Comment, 0, g.maxPerPage) created = "created" @@ -525,6 +534,75 @@ func (g *GithubDownloaderV3) GetComments(issueNumber int64) ([]*base.Comment, er return allComments, nil } +// GetAllComments returns repository comments according page and perPageSize +func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) { + var ( + allComments = make([]*base.Comment, 0, perPage) + created = "created" + asc = "asc" + ) + opt := &github.IssueListCommentsOptions{ + Sort: &created, + Direction: &asc, + ListOptions: github.ListOptions{ + Page: page, + PerPage: perPage, + }, + } + + g.sleep() + comments, resp, err := g.client.Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %v", err) + } + log.Trace("Request get comments %d/%d, but in fact get %d", perPage, page, len(comments)) + g.rate = &resp.Rate + for _, comment := range comments { + var email string + if comment.User.Email != nil { + email = *comment.User.Email + } + + // get reactions + var reactions []*base.Reaction + for i := 1; ; i++ { + g.sleep() + res, resp, err := 
g.client.Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }) + if err != nil { + return nil, false, err + } + g.rate = &resp.Rate + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + idx := strings.LastIndex(*comment.IssueURL, "/") + issueIndex, _ := strconv.ParseInt((*comment.IssueURL)[idx+1:], 10, 64) + allComments = append(allComments, &base.Comment{ + IssueIndex: issueIndex, + PosterID: *comment.User.ID, + PosterName: *comment.User.Login, + PosterEmail: email, + Content: *comment.Body, + Created: *comment.CreatedAt, + Updated: *comment.UpdatedAt, + Reactions: reactions, + }) + } + + return allComments, len(allComments) < perPage, nil +} + // GetPullRequests returns pull requests according page and perPage func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) { if perPage > g.maxPerPage { @@ -545,6 +623,7 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq if err != nil { return nil, false, fmt.Errorf("error while listing repos: %v", err) } + log.Trace("Request get pull requests %d/%d, but in fact get %d", perPage, page, len(prs)) g.rate = &resp.Rate for _, pr := range prs { var body string diff --git a/modules/migrations/github_test.go b/modules/migrations/github_test.go index efa8b6ba9bbb..e0ee2fea8447 100644 --- a/modules/migrations/github_test.go +++ b/modules/migrations/github_test.go @@ -147,7 +147,7 @@ func TestGitHubDownloadRepo(t *testing.T) { // downloader.GetIssues() issues, isEnd, err := downloader.GetIssues(1, 2) assert.NoError(t, err) - assert.EqualValues(t, 2, len(issues)) + assert.Len(t, issues, 2) assert.False(t, isEnd) var ( @@ -240,9 +240,11 @@ func TestGitHubDownloadRepo(t *testing.T) { }, issues) // downloader.GetComments() - comments, err := downloader.GetComments(2) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: 2, + }) assert.NoError(t, err) - assert.EqualValues(t, 2, len(comments)) + assert.Len(t, comments, 2) assert.EqualValues(t, []*base.Comment{ { IssueIndex: 2, @@ -273,7 +275,7 @@ func TestGitHubDownloadRepo(t *testing.T) { // downloader.GetPullRequests() prs, _, err := downloader.GetPullRequests(1, 2) assert.NoError(t, err) - assert.EqualValues(t, 2, len(prs)) + assert.Len(t, prs, 2) closed1 = time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC) var merged1 = time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC) diff --git a/modules/migrations/gitlab.go b/modules/migrations/gitlab.go index a697075ff892..fe763f990033 100644 --- a/modules/migrations/gitlab.go +++ b/modules/migrations/gitlab.go @@ -56,7 +56,7 @@ func (f *GitlabDownloaderFactory) GitServiceType() structs.GitServiceType { return structs.GitlabService } -// GitlabDownloader implements a Downloader interface to get repository informations +// GitlabDownloader implements a Downloader interface to get repository information // from gitlab via go-gitlab // - issueCount is incremented in GetIssues() to ensure PR and Issue numbers do not overlap, // because Gitlab has individual Issue and Pull Request numbers. 
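// Illustrative sketch, not part of the patch: across the downloaders touched by this
// change, GetComments now takes a base.GetCommentOptions value instead of a bare issue
// number and additionally returns an "isEnd" flag, so one method covers both per-issue
// fetching and, where SupportGetRepoComments() reports true, whole-repository paging
// via Page/PageSize. A minimal, hypothetical caller for the per-issue case could look
// like the helper below; base.GetCommentOptions, the (comments, isEnd, err) return
// shape and CreateComments come from the hunks in this patch, while the helper name
// itself is assumed for illustration only.
func migrateIssueComments(d base.Downloader, u base.Uploader, issueNumber int64) error {
	// Fetch every comment of a single issue; the isEnd value is ignored here because
	// it only matters when paging through all repository comments with Page/PageSize.
	comments, _, err := d.GetComments(base.GetCommentOptions{IssueNumber: issueNumber})
	if err != nil {
		return err
	}
	// Hand the batch to the uploader, mirroring what migrateRepository does.
	return u.CreateComments(comments...)
}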
@@ -430,7 +430,8 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er // GetComments returns comments according issueNumber // TODO: figure out how to transfer comment reactions -func (g *GitlabDownloader) GetComments(issueNumber int64) ([]*base.Comment, error) { +func (g *GitlabDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { + var issueNumber = opts.IssueNumber var allComments = make([]*base.Comment, 0, g.maxPerPage) var page = 1 @@ -457,7 +458,7 @@ func (g *GitlabDownloader) GetComments(issueNumber int64) ([]*base.Comment, erro } if err != nil { - return nil, fmt.Errorf("error while listing comments: %v %v", g.repoID, err) + return nil, false, fmt.Errorf("error while listing comments: %v %v", g.repoID, err) } for _, comment := range comments { // Flatten comment threads @@ -490,7 +491,7 @@ func (g *GitlabDownloader) GetComments(issueNumber int64) ([]*base.Comment, erro } page = resp.NextPage } - return allComments, nil + return allComments, true, nil } // GetPullRequests returns pull requests according page and perPage diff --git a/modules/migrations/gitlab_test.go b/modules/migrations/gitlab_test.go index f64d72147cf5..6a77ff3c230a 100644 --- a/modules/migrations/gitlab_test.go +++ b/modules/migrations/gitlab_test.go @@ -115,7 +115,7 @@ func TestGitlabDownloadRepo(t *testing.T) { issues, isEnd, err := downloader.GetIssues(1, 2) assert.NoError(t, err) - assert.EqualValues(t, 2, len(issues)) + assert.Len(t, issues, 2) assert.False(t, isEnd) var ( @@ -204,9 +204,11 @@ func TestGitlabDownloadRepo(t *testing.T) { }, }, issues) - comments, err := downloader.GetComments(2) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: 2, + }) assert.NoError(t, err) - assert.EqualValues(t, 4, len(comments)) + assert.Len(t, comments, 4) assert.EqualValues(t, []*base.Comment{ { IssueIndex: 2, diff --git a/modules/migrations/gogs.go b/modules/migrations/gogs.go index b616907938ff..9e663fd1fee4 100644 --- a/modules/migrations/gogs.go +++ b/modules/migrations/gogs.go @@ -57,7 +57,7 @@ func (f *GogsDownloaderFactory) GitServiceType() structs.GitServiceType { return structs.GogsService } -// GogsDownloader implements a Downloader interface to get repository informations +// GogsDownloader implements a Downloader interface to get repository information // from gogs via API type GogsDownloader struct { base.NullDownloader @@ -227,12 +227,13 @@ func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool, } // GetComments returns comments according issueNumber -func (g *GogsDownloader) GetComments(issueNumber int64) ([]*base.Comment, error) { +func (g *GogsDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { + var issueNumber = opts.IssueNumber var allComments = make([]*base.Comment, 0, 100) comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, issueNumber) if err != nil { - return nil, fmt.Errorf("error while listing repos: %v", err) + return nil, false, fmt.Errorf("error while listing repos: %v", err) } for _, comment := range comments { if len(comment.Body) == 0 || comment.Poster == nil { @@ -249,7 +250,7 @@ func (g *GogsDownloader) GetComments(issueNumber int64) ([]*base.Comment, error) }) } - return allComments, nil + return allComments, true, nil } // GetTopics return repository topics diff --git a/modules/migrations/gogs_test.go b/modules/migrations/gogs_test.go index c240ae6432ac..4e384036d702 100644 --- a/modules/migrations/gogs_test.go +++ 
b/modules/migrations/gogs_test.go @@ -80,7 +80,7 @@ func TestGogsDownloadRepo(t *testing.T) { // downloader.GetIssues() issues, isEnd, err := downloader.GetIssues(1, 8) assert.NoError(t, err) - assert.EqualValues(t, 1, len(issues)) + assert.Len(t, issues, 1) assert.False(t, isEnd) assert.EqualValues(t, []*base.Issue{ @@ -103,9 +103,11 @@ func TestGogsDownloadRepo(t *testing.T) { }, issues) // downloader.GetComments() - comments, err := downloader.GetComments(1) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: 1, + }) assert.NoError(t, err) - assert.EqualValues(t, 1, len(comments)) + assert.Len(t, comments, 1) assert.EqualValues(t, []*base.Comment{ { PosterName: "lunny", diff --git a/modules/migrations/migrate.go b/modules/migrations/migrate.go index 2f8889e67b53..0a507d9c3341 100644 --- a/modules/migrations/migrate.go +++ b/modules/migrations/migrate.go @@ -99,7 +99,7 @@ func IsMigrateURLAllowed(remoteURL string, doer *models.User) error { } // MigrateRepository migrate repository according MigrateOptions -func MigrateRepository(ctx context.Context, doer *models.User, ownerName string, opts base.MigrateOptions) (*models.Repository, error) { +func MigrateRepository(ctx context.Context, doer *models.User, ownerName string, opts base.MigrateOptions, messenger base.Messenger) (*models.Repository, error) { err := IsMigrateURLAllowed(opts.CloneAddr, doer) if err != nil { return nil, err @@ -118,7 +118,7 @@ func MigrateRepository(ctx context.Context, doer *models.User, ownerName string, var uploader = NewGiteaLocalUploader(ctx, doer, ownerName, opts.RepoName) uploader.gitServiceType = opts.GitServiceType - if err := migrateRepository(downloader, uploader, opts); err != nil { + if err := migrateRepository(downloader, uploader, opts, messenger); err != nil { if err1 := uploader.Rollback(); err1 != nil { log.Error("rollback failed: %v", err1) } @@ -167,7 +167,11 @@ func newDownloader(ctx context.Context, ownerName string, opts base.MigrateOptio // migrateRepository will download information and then upload it to Uploader, this is a simple // process for small repository. 
For a big repository, save all the data to disk // before upload is better -func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions) error { +func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions, messenger base.Messenger) error { + if messenger == nil { + messenger = base.NilMessenger + } + repo, err := downloader.GetRepoInfo() if err != nil { if !base.IsErrNotSupported(err) { @@ -184,13 +188,15 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts return err } - log.Trace("migrating git data") + log.Trace("migrating git data from %s", repo.CloneURL) + messenger("repo.migrate.migrating_git") if err = uploader.CreateRepo(repo, opts); err != nil { return err } defer uploader.Close() log.Trace("migrating topics") + messenger("repo.migrate.migrating_topics") topics, err := downloader.GetTopics() if err != nil { if !base.IsErrNotSupported(err) { @@ -206,6 +212,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts if opts.Milestones { log.Trace("migrating milestones") + messenger("repo.migrate.migrating_milestones") milestones, err := downloader.GetMilestones() if err != nil { if !base.IsErrNotSupported(err) { @@ -229,6 +236,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts if opts.Labels { log.Trace("migrating labels") + messenger("repo.migrate.migrating_labels") labels, err := downloader.GetLabels() if err != nil { if !base.IsErrNotSupported(err) { @@ -252,6 +260,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts if opts.Releases { log.Trace("migrating releases") + messenger("repo.migrate.migrating_releases") releases, err := downloader.GetReleases() if err != nil { if !base.IsErrNotSupported(err) { @@ -283,8 +292,11 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts reviewBatchSize = uploader.MaxBatchInsertSize("review") ) + supportAllComments := downloader.SupportGetRepoComments() + if opts.Issues { log.Trace("migrating issues and comments") + messenger("repo.migrate.migrating_issues") var issueBatchSize = uploader.MaxBatchInsertSize("issue") for i := 1; ; i++ { @@ -301,11 +313,13 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts return err } - if opts.Comments { + if opts.Comments && !supportAllComments { var allComments = make([]*base.Comment, 0, commentBatchSize) for _, issue := range issues { log.Trace("migrating issue %d's comments", issue.Number) - comments, err := downloader.GetComments(issue.Number) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: issue.Number, + }) if err != nil { if !base.IsErrNotSupported(err) { return err @@ -339,6 +353,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts if opts.PullRequests { log.Trace("migrating pull requests and comments") + messenger("repo.migrate.migrating_pulls") var prBatchSize = uploader.MaxBatchInsertSize("pullrequest") for i := 1; ; i++ { prs, isEnd, err := downloader.GetPullRequests(i, prBatchSize) @@ -355,30 +370,34 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts } if opts.Comments { - // plain comments - var allComments = make([]*base.Comment, 0, commentBatchSize) - for _, pr := range prs { - log.Trace("migrating pull request %d's comments", pr.Number) - comments, err := downloader.GetComments(pr.Number) - if err != nil { - if 
!base.IsErrNotSupported(err) { - return err + if !supportAllComments { + // plain comments + var allComments = make([]*base.Comment, 0, commentBatchSize) + for _, pr := range prs { + log.Trace("migrating pull request %d's comments", pr.Number) + comments, _, err := downloader.GetComments(base.GetCommentOptions{ + IssueNumber: pr.Number, + }) + if err != nil { + if !base.IsErrNotSupported(err) { + return err + } + log.Warn("migrating comments is not supported, ignored") } - log.Warn("migrating comments is not supported, ignored") - } - allComments = append(allComments, comments...) + allComments = append(allComments, comments...) - if len(allComments) >= commentBatchSize { - if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil { - return err + if len(allComments) >= commentBatchSize { + if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil { + return err + } + allComments = allComments[commentBatchSize:] } - allComments = allComments[commentBatchSize:] } - } - if len(allComments) > 0 { - if err = uploader.CreateComments(allComments...); err != nil { - return err + if len(allComments) > 0 { + if err = uploader.CreateComments(allComments...); err != nil { + return err + } } } @@ -428,6 +447,27 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts } } + if opts.Comments && supportAllComments { + log.Trace("migrating comments") + for i := 1; ; i++ { + comments, isEnd, err := downloader.GetComments(base.GetCommentOptions{ + Page: i, + PageSize: commentBatchSize, + }) + if err != nil { + return err + } + + if err := uploader.CreateComments(comments...); err != nil { + return err + } + + if isEnd { + break + } + } + } + return uploader.Finish() } diff --git a/modules/migrations/restore.go b/modules/migrations/restore.go index 4e63df14292a..6177f80cbbca 100644 --- a/modules/migrations/restore.go +++ b/modules/migrations/restore.go @@ -83,7 +83,7 @@ func (r *RepositoryRestorer) GetRepoInfo() (*base.Repository, error) { IsPrivate: isPrivate, Description: opts["description"], OriginalURL: opts["original_url"], - CloneURL: opts["clone_addr"], + CloneURL: filepath.Join(r.baseDir, "git"), DefaultBranch: opts["default_branch"], }, nil } @@ -155,7 +155,9 @@ func (r *RepositoryRestorer) GetReleases() ([]*base.Release, error) { } for _, rel := range releases { for _, asset := range rel.Assets { - *asset.DownloadURL = "file://" + filepath.Join(r.baseDir, *asset.DownloadURL) + if asset.DownloadURL != nil { + *asset.DownloadURL = "file://" + filepath.Join(r.baseDir, *asset.DownloadURL) + } } } return releases, nil @@ -210,27 +212,27 @@ func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool, } // GetComments returns comments according issueNumber -func (r *RepositoryRestorer) GetComments(issueNumber int64) ([]*base.Comment, error) { +func (r *RepositoryRestorer) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { var comments = make([]*base.Comment, 0, 10) - p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", issueNumber)) + p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", opts.IssueNumber)) _, err := os.Stat(p) if err != nil { if os.IsNotExist(err) { - return nil, nil + return nil, false, nil } - return nil, err + return nil, false, err } bs, err := ioutil.ReadFile(p) if err != nil { - return nil, err + return nil, false, err } err = yaml.Unmarshal(bs, &comments) if err != nil { - return nil, err + return nil, false, err } - return comments, nil + return comments, false, 
nil } // GetPullRequests returns pull requests according page and perPage diff --git a/modules/nosql/manager_redis.go b/modules/nosql/manager_redis.go index d754a0e07d9c..b4852cecc849 100644 --- a/modules/nosql/manager_redis.go +++ b/modules/nosql/manager_redis.go @@ -152,7 +152,7 @@ func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) } if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path); err == nil { + if db, err := strconv.Atoi(uri.Path[1:]); err == nil { opts.DB = db } } @@ -168,7 +168,7 @@ func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) } if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path); err == nil { + if db, err := strconv.Atoi(uri.Path[1:]); err == nil { opts.DB = db } } @@ -186,7 +186,7 @@ func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) } if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path); err == nil { + if db, err := strconv.Atoi(uri.Path[1:]); err == nil { opts.DB = db } } diff --git a/modules/notification/base/null.go b/modules/notification/base/null.go index e61b37a94309..32fe259bca8d 100644 --- a/modules/notification/base/null.go +++ b/modules/notification/base/null.go @@ -143,7 +143,7 @@ func (*NullNotifier) NotifyPushCommits(pusher *models.User, repo *models.Reposit func (*NullNotifier) NotifyCreateRef(doer *models.User, repo *models.Repository, refType, refFullName string) { } -// NotifyDeleteRef notifies branch or tag deleteion to notifiers +// NotifyDeleteRef notifies branch or tag deletion to notifiers func (*NullNotifier) NotifyDeleteRef(doer *models.User, repo *models.Repository, refType, refFullName string) { } diff --git a/modules/notification/mail/mail.go b/modules/notification/mail/mail.go index eb45409faf27..5bfb0b3ef8bb 100644 --- a/modules/notification/mail/mail.go +++ b/modules/notification/mail/mail.go @@ -54,7 +54,6 @@ func (m *mailNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.Us func (m *mailNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models.Issue, actionComment *models.Comment, isClosed bool) { var actionType models.ActionType - issue.Content = "" if issue.IsPull { if isClosed { actionType = models.ActionClosePullRequest @@ -74,6 +73,18 @@ func (m *mailNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models. 
} } +func (m *mailNotifier) NotifyIssueChangeTitle(doer *models.User, issue *models.Issue, oldTitle string) { + if err := issue.LoadPullRequest(); err != nil { + log.Error("issue.LoadPullRequest: %v", err) + return + } + if issue.IsPull && models.HasWorkInProgressPrefix(oldTitle) && !issue.PullRequest.IsWorkInProgress() { + if err := mailer.MailParticipants(issue, doer, models.ActionPullRequestReadyForReview, nil); err != nil { + log.Error("MailParticipants: %v", err) + } + } +} + func (m *mailNotifier) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) { if err := mailer.MailParticipants(pr.Issue, pr.Issue.Poster, models.ActionCreatePullRequest, mentions); err != nil { log.Error("MailParticipants: %v", err) @@ -124,7 +135,6 @@ func (m *mailNotifier) NotifyMergePullRequest(pr *models.PullRequest, doer *mode log.Error("pr.LoadIssue: %v", err) return } - pr.Issue.Content = "" if err := mailer.MailParticipants(pr.Issue, doer, models.ActionMergePullRequest, nil); err != nil { log.Error("MailParticipants: %v", err) } @@ -151,8 +161,6 @@ func (m *mailNotifier) NotifyPullRequestPushCommits(doer *models.User, pr *model if err := comment.LoadPushCommits(); err != nil { log.Error("comment.LoadPushCommits: %v", err) } - comment.Content = "" - m.NotifyCreateIssueComment(doer, comment.Issue.Repo, comment.Issue, comment, nil) } diff --git a/modules/notification/ui/ui.go b/modules/notification/ui/ui.go index b1374f5608fd..f372d6759ce2 100644 --- a/modules/notification/ui/ui.go +++ b/modules/notification/ui/ui.go @@ -94,6 +94,19 @@ func (ns *notificationService) NotifyIssueChangeStatus(doer *models.User, issue }) } +func (ns *notificationService) NotifyIssueChangeTitle(doer *models.User, issue *models.Issue, oldTitle string) { + if err := issue.LoadPullRequest(); err != nil { + log.Error("issue.LoadPullRequest: %v", err) + return + } + if issue.IsPull && models.HasWorkInProgressPrefix(oldTitle) && !issue.PullRequest.IsWorkInProgress() { + _ = ns.issueQueue.Push(issueNotificationOpts{ + IssueID: issue.ID, + NotificationAuthorID: doer.ID, + }) + } +} + func (ns *notificationService) NotifyMergePullRequest(pr *models.PullRequest, doer *models.User) { _ = ns.issueQueue.Push(issueNotificationOpts{ IssueID: pr.Issue.ID, @@ -106,15 +119,32 @@ func (ns *notificationService) NotifyNewPullRequest(pr *models.PullRequest, ment log.Error("Unable to load issue: %d for pr: %d: Error: %v", pr.IssueID, pr.ID, err) return } - _ = ns.issueQueue.Push(issueNotificationOpts{ - IssueID: pr.Issue.ID, - NotificationAuthorID: pr.Issue.PosterID, - }) + toNotify := make(map[int64]struct{}, 32) + repoWatchers, err := models.GetRepoWatchersIDs(pr.Issue.RepoID) + if err != nil { + log.Error("GetRepoWatchersIDs: %v", err) + return + } + for _, id := range repoWatchers { + toNotify[id] = struct{}{} + } + issueParticipants, err := models.GetParticipantsIDsByIssueID(pr.IssueID) + if err != nil { + log.Error("GetParticipantsIDsByIssueID: %v", err) + return + } + for _, id := range issueParticipants { + toNotify[id] = struct{}{} + } + delete(toNotify, pr.Issue.PosterID) for _, mention := range mentions { + toNotify[mention.ID] = struct{}{} + } + for receiverID := range toNotify { _ = ns.issueQueue.Push(issueNotificationOpts{ IssueID: pr.Issue.ID, NotificationAuthorID: pr.Issue.PosterID, - ReceiverID: mention.ID, + ReceiverID: receiverID, }) } } diff --git a/modules/notification/webhook/webhook.go b/modules/notification/webhook/webhook.go index 90dc59021c5c..acdb91efe373 100644 --- a/modules/notification/webhook/webhook.go 
+++ b/modules/notification/webhook/webhook.go @@ -562,7 +562,7 @@ func (m *webhookNotifier) NotifyIssueChangeMilestone(doer *models.User, issue *m func (m *webhookNotifier) NotifyPushCommits(pusher *models.User, repo *models.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) { apiPusher := convert.ToUser(pusher, nil) - apiCommits, err := commits.ToAPIPayloadCommits(repo.RepoPath(), repo.HTMLURL()) + apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(repo.RepoPath(), repo.HTMLURL()) if err != nil { log.Error("commits.ToAPIPayloadCommits failed: %v", err) return @@ -574,6 +574,7 @@ func (m *webhookNotifier) NotifyPushCommits(pusher *models.User, repo *models.Re After: opts.NewCommitID, CompareURL: setting.AppURL + commits.CompareURL, Commits: apiCommits, + HeadCommit: apiHeadCommit, Repo: convert.ToRepo(repo, models.AccessModeOwner), Pusher: apiPusher, Sender: apiPusher, @@ -790,7 +791,7 @@ func (m *webhookNotifier) NotifyDeleteRelease(doer *models.User, rel *models.Rel func (m *webhookNotifier) NotifySyncPushCommits(pusher *models.User, repo *models.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) { apiPusher := convert.ToUser(pusher, nil) - apiCommits, err := commits.ToAPIPayloadCommits(repo.RepoPath(), repo.HTMLURL()) + apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(repo.RepoPath(), repo.HTMLURL()) if err != nil { log.Error("commits.ToAPIPayloadCommits failed: %v", err) return @@ -802,6 +803,7 @@ func (m *webhookNotifier) NotifySyncPushCommits(pusher *models.User, repo *model After: opts.NewCommitID, CompareURL: setting.AppURL + commits.CompareURL, Commits: apiCommits, + HeadCommit: apiHeadCommit, Repo: convert.ToRepo(repo, models.AccessModeOwner), Pusher: apiPusher, Sender: apiPusher, diff --git a/modules/password/password_test.go b/modules/password/password_test.go index 4325086b50b8..63f98aa9c354 100644 --- a/modules/password/password_test.go +++ b/modules/password/password_test.go @@ -54,7 +54,7 @@ func TestComplexity_Generate(t *testing.T) { for i := 0; i < maxCount; i++ { pwd, err := Generate(pwdLen) assert.NoError(t, err) - assert.Equal(t, pwdLen, len(pwd)) + assert.Len(t, pwd, pwdLen) assert.True(t, IsComplexEnough(pwd), "Failed complexities with modes %+v for generated: %s", modes, pwd) } } diff --git a/modules/private/hook.go b/modules/private/hook.go index 178500f7362d..79fae052dd76 100644 --- a/modules/private/hook.go +++ b/modules/private/hook.go @@ -5,6 +5,8 @@ package private import ( + "context" + "encoding/json" "fmt" "net/http" "net/url" @@ -53,10 +55,16 @@ type HookOptions struct { GitAlternativeObjectDirectories string GitQuarantinePath string GitPushOptions GitPushOptions - ProtectedBranchID int64 + PullRequestID int64 IsDeployKey bool } +// SSHLogOption ssh log options +type SSHLogOption struct { + IsError bool + Message string +} + // HookPostReceiveResult represents an individual result from PostReceive type HookPostReceiveResult struct { Results []HookPostReceiveBranchResult @@ -73,12 +81,12 @@ type HookPostReceiveBranchResult struct { } // HookPreReceive check whether the provided commits are allowed -func HookPreReceive(ownerName, repoName string, opts HookOptions) (int, string) { +func HookPreReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (int, string) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/pre-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName), ) - req := newInternalRequest(reqURL, "POST") + req := 
newInternalRequest(ctx, reqURL, "POST") req = req.Header("Content-Type", "application/json") json := jsoniter.ConfigCompatibleWithStandardLibrary jsonBytes, _ := json.Marshal(opts) @@ -98,13 +106,13 @@ func HookPreReceive(ownerName, repoName string, opts HookOptions) (int, string) } // HookPostReceive updates services and users -func HookPostReceive(ownerName, repoName string, opts HookOptions) (*HookPostReceiveResult, string) { +func HookPostReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (*HookPostReceiveResult, string) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/post-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName), ) - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") req = req.Header("Content-Type", "application/json") req.SetTimeout(60*time.Second, time.Duration(60+len(opts.OldCommitIDs))*time.Second) json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -126,13 +134,13 @@ func HookPostReceive(ownerName, repoName string, opts HookOptions) (*HookPostRec } // SetDefaultBranch will set the default branch to the provided branch for the provided repository -func SetDefaultBranch(ownerName, repoName, branch string) error { +func SetDefaultBranch(ctx context.Context, ownerName, repoName, branch string) error { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/set-default-branch/%s/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName), url.PathEscape(branch), ) - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") req = req.Header("Content-Type", "application/json") req.SetTimeout(60*time.Second, 60*time.Second) @@ -146,3 +154,28 @@ func SetDefaultBranch(ownerName, repoName, branch string) error { } return nil } + +// SSHLog sends ssh error log response +func SSHLog(ctx context.Context, isErr bool, msg string) error { + reqURL := setting.LocalURL + "api/internal/ssh/log" + req := newInternalRequest(ctx, reqURL, "POST") + req = req.Header("Content-Type", "application/json") + + jsonBytes, _ := json.Marshal(&SSHLogOption{ + IsError: isErr, + Message: msg, + }) + req.Body(jsonBytes) + + req.SetTimeout(60*time.Second, 60*time.Second) + resp, err := req.Response() + if err != nil { + return fmt.Errorf("unable to contact gitea: %v", err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("Error returned from gitea: %v", decodeJSONError(resp).Err) + } + return nil +} diff --git a/modules/private/internal.go b/modules/private/internal.go index 360fae47b6e8..672ac74970ed 100644 --- a/modules/private/internal.go +++ b/modules/private/internal.go @@ -5,6 +5,7 @@ package private import ( + "context" "crypto/tls" "fmt" "net" @@ -15,9 +16,11 @@ import ( jsoniter "github.com/json-iterator/go" ) -func newRequest(url, method string) *httplib.Request { - return httplib.NewRequest(url, method).Header("Authorization", - fmt.Sprintf("Bearer %s", setting.InternalToken)) +func newRequest(ctx context.Context, url, method string) *httplib.Request { + return httplib.NewRequest(url, method). + SetContext(ctx). 
+ Header("Authorization", + fmt.Sprintf("Bearer %s", setting.InternalToken)) } // Response internal request response @@ -35,8 +38,8 @@ func decodeJSONError(resp *http.Response) *Response { return &res } -func newInternalRequest(url, method string) *httplib.Request { - req := newRequest(url, method).SetTLSClientConfig(&tls.Config{ +func newInternalRequest(ctx context.Context, url, method string) *httplib.Request { + req := newRequest(ctx, url, method).SetTLSClientConfig(&tls.Config{ InsecureSkipVerify: true, ServerName: setting.Domain, }) @@ -45,6 +48,10 @@ func newInternalRequest(url, method string) *httplib.Request { Dial: func(_, _ string) (net.Conn, error) { return net.Dial("unix", setting.HTTPAddr) }, + DialContext: func(ctx context.Context, _, _ string) (net.Conn, error) { + var d net.Dialer + return d.DialContext(ctx, "unix", setting.HTTPAddr) + }, }) } return req diff --git a/modules/private/key.go b/modules/private/key.go index bea7837906a0..d0b11a96e7ac 100644 --- a/modules/private/key.go +++ b/modules/private/key.go @@ -5,6 +5,7 @@ package private import ( + "context" "fmt" "io/ioutil" "net/http" @@ -13,10 +14,10 @@ import ( ) // UpdatePublicKeyInRepo update public key and if necessary deploy key updates -func UpdatePublicKeyInRepo(keyID, repoID int64) error { +func UpdatePublicKeyInRepo(ctx context.Context, keyID, repoID int64) error { // Ask for running deliver hook and test pull request tasks. reqURL := setting.LocalURL + fmt.Sprintf("api/internal/ssh/%d/update/%d", keyID, repoID) - resp, err := newInternalRequest(reqURL, "POST").Response() + resp, err := newInternalRequest(ctx, reqURL, "POST").Response() if err != nil { return err } @@ -32,10 +33,10 @@ func UpdatePublicKeyInRepo(keyID, repoID int64) error { // AuthorizedPublicKeyByContent searches content as prefix (leak e-mail part) // and returns public key found. -func AuthorizedPublicKeyByContent(content string) (string, error) { +func AuthorizedPublicKeyByContent(ctx context.Context, content string) (string, error) { // Ask for running deliver hook and test pull request tasks. 
reqURL := setting.LocalURL + "api/internal/ssh/authorized_keys" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") req.Param("content", content) resp, err := req.Response() if err != nil { diff --git a/modules/private/mail.go b/modules/private/mail.go index 9c0912a6e349..4a5a3eedd794 100644 --- a/modules/private/mail.go +++ b/modules/private/mail.go @@ -5,6 +5,7 @@ package private import ( + "context" "fmt" "io/ioutil" "net/http" @@ -27,10 +28,10 @@ type Email struct { // // If to list == nil its supposed to send an email to every // user present in DB -func SendEmail(subject, message string, to []string) (int, string) { +func SendEmail(ctx context.Context, subject, message string, to []string) (int, string) { reqURL := setting.LocalURL + "api/internal/mail/send" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") req = req.Header("Content-Type", "application/json") json := jsoniter.ConfigCompatibleWithStandardLibrary jsonBytes, _ := json.Marshal(Email{ diff --git a/modules/private/manager.go b/modules/private/manager.go index 2bc6cec3b968..0bcc3f811248 100644 --- a/modules/private/manager.go +++ b/modules/private/manager.go @@ -5,6 +5,7 @@ package private import ( + "context" "fmt" "net/http" "net/url" @@ -15,10 +16,10 @@ import ( ) // Shutdown calls the internal shutdown function -func Shutdown() (int, string) { +func Shutdown(ctx context.Context) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/shutdown" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) @@ -33,10 +34,10 @@ func Shutdown() (int, string) { } // Restart calls the internal restart function -func Restart() (int, string) { +func Restart(ctx context.Context) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/restart" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) @@ -57,10 +58,10 @@ type FlushOptions struct { } // FlushQueues calls the internal flush-queues function -func FlushQueues(timeout time.Duration, nonBlocking bool) (int, string) { +func FlushQueues(ctx context.Context, timeout time.Duration, nonBlocking bool) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/flush-queues" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") if timeout > 0 { req.SetTimeout(timeout+10*time.Second, timeout+10*time.Second) } @@ -85,10 +86,10 @@ func FlushQueues(timeout time.Duration, nonBlocking bool) (int, string) { } // PauseLogging pauses logging -func PauseLogging() (int, string) { +func PauseLogging(ctx context.Context) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/pause-logging" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) @@ -103,10 +104,10 @@ func PauseLogging() (int, string) { } // ResumeLogging resumes logging -func ResumeLogging() (int, string) { +func ResumeLogging(ctx context.Context) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/resume-logging" - req := 
newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) @@ -121,10 +122,10 @@ func ResumeLogging() (int, string) { } // ReleaseReopenLogging releases and reopens logging files -func ReleaseReopenLogging() (int, string) { +func ReleaseReopenLogging(ctx context.Context) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/release-and-reopen-logging" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) @@ -147,10 +148,10 @@ type LoggerOptions struct { } // AddLogger adds a logger -func AddLogger(group, name, mode string, config map[string]interface{}) (int, string) { +func AddLogger(ctx context.Context, group, name, mode string, config map[string]interface{}) (int, string) { reqURL := setting.LocalURL + "api/internal/manager/add-logger" - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") req = req.Header("Content-Type", "application/json") json := jsoniter.ConfigCompatibleWithStandardLibrary jsonBytes, _ := json.Marshal(LoggerOptions{ @@ -175,10 +176,10 @@ func AddLogger(group, name, mode string, config map[string]interface{}) (int, st } // RemoveLogger removes a logger -func RemoveLogger(group, name string) (int, string) { +func RemoveLogger(ctx context.Context, group, name string) (int, string) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/remove-logger/%s/%s", url.PathEscape(group), url.PathEscape(name)) - req := newInternalRequest(reqURL, "POST") + req := newInternalRequest(ctx, reqURL, "POST") resp, err := req.Response() if err != nil { return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) diff --git a/modules/private/restore_repo.go b/modules/private/restore_repo.go new file mode 100644 index 000000000000..66b60d8d124b --- /dev/null +++ b/modules/private/restore_repo.go @@ -0,0 +1,61 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package private + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "time" + + "code.gitea.io/gitea/modules/setting" + jsoniter "github.com/json-iterator/go" +) + +// RestoreParams structure holds a data for restore repository +type RestoreParams struct { + RepoDir string + OwnerName string + RepoName string + Units []string +} + +// RestoreRepo calls the internal RestoreRepo function +func RestoreRepo(ctx context.Context, repoDir, ownerName, repoName string, units []string) (int, string) { + reqURL := setting.LocalURL + "api/internal/restore_repo" + + req := newInternalRequest(ctx, reqURL, "POST") + req.SetTimeout(3*time.Second, 0) // since the request will spend much time, don't timeout + req = req.Header("Content-Type", "application/json") + json := jsoniter.ConfigCompatibleWithStandardLibrary + jsonBytes, _ := json.Marshal(RestoreParams{ + RepoDir: repoDir, + OwnerName: ownerName, + RepoName: repoName, + Units: units, + }) + req.Body(jsonBytes) + resp, err := req.Response() + if err != nil { + return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v, could you confirm it's running?", err.Error()) + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + var ret = struct { + Err string `json:"err"` + }{} + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return http.StatusInternalServerError, fmt.Sprintf("Response body error: %v", err.Error()) + } + if err := json.Unmarshal(body, &ret); err != nil { + return http.StatusInternalServerError, fmt.Sprintf("Response body Unmarshal error: %v", err.Error()) + } + } + + return http.StatusOK, fmt.Sprintf("Restore repo %s/%s successfully", ownerName, repoName) +} diff --git a/modules/private/serv.go b/modules/private/serv.go index e077b00ccc1c..9643dad679a1 100644 --- a/modules/private/serv.go +++ b/modules/private/serv.go @@ -5,6 +5,7 @@ package private import ( + "context" "fmt" "net/http" "net/url" @@ -21,10 +22,10 @@ type KeyAndOwner struct { } // ServNoCommand returns information about the provided key -func ServNoCommand(keyID int64) (*models.PublicKey, *models.User, error) { +func ServNoCommand(ctx context.Context, keyID int64) (*models.PublicKey, *models.User, error) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/serv/none/%d", keyID) - resp, err := newInternalRequest(reqURL, "GET").Response() + resp, err := newInternalRequest(ctx, reqURL, "GET").Response() if err != nil { return nil, nil, err } @@ -58,7 +59,6 @@ type ServCommandResults struct { // ErrServCommand is an error returned from ServCommmand. 
type ErrServCommand struct { Results ServCommandResults - Type string Err string StatusCode int } @@ -74,7 +74,7 @@ func IsErrServCommand(err error) bool { } // ServCommand preps for a serv call -func ServCommand(keyID int64, ownerName, repoName string, mode models.AccessMode, verbs ...string) (*ServCommandResults, error) { +func ServCommand(ctx context.Context, keyID int64, ownerName, repoName string, mode models.AccessMode, verbs ...string) (*ServCommandResults, error) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/serv/command/%d/%s/%s?mode=%d", keyID, url.PathEscape(ownerName), @@ -86,7 +86,7 @@ func ServCommand(keyID int64, ownerName, repoName string, mode models.AccessMode } } - resp, err := newInternalRequest(reqURL, "GET").Response() + resp, err := newInternalRequest(ctx, reqURL, "GET").Response() if err != nil { return nil, err } diff --git a/modules/public/dynamic.go b/modules/public/dynamic.go index a57b63636929..0bfe38bc3f3f 100644 --- a/modules/public/dynamic.go +++ b/modules/public/dynamic.go @@ -13,12 +13,11 @@ import ( "time" ) -// Static implements the static handler for serving assets. -func Static(opts *Options) func(next http.Handler) http.Handler { - return opts.staticHandler(opts.Directory) +func fileSystem(dir string) http.FileSystem { + return http.Dir(dir) } -// ServeContent serve http content -func ServeContent(w http.ResponseWriter, req *http.Request, fi os.FileInfo, modtime time.Time, content io.ReadSeeker) { +// serveContent serve http content +func serveContent(w http.ResponseWriter, req *http.Request, fi os.FileInfo, modtime time.Time, content io.ReadSeeker) { http.ServeContent(w, req, fi.Name(), modtime, content) } diff --git a/modules/public/public.go b/modules/public/public.go index c68f980352ab..a58709d86fc9 100644 --- a/modules/public/public.go +++ b/modules/public/public.go @@ -5,85 +5,82 @@ package public import ( - "log" "net/http" + "os" "path" "path/filepath" "strings" "code.gitea.io/gitea/modules/httpcache" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" ) // Options represents the available options to configure the handler. type Options struct { Directory string - IndexFile string - SkipLogging bool - FileSystem http.FileSystem Prefix string + CorsHandler func(http.Handler) http.Handler } -// KnownPublicEntries list all direct children in the `public` directory -var KnownPublicEntries = []string{ - "css", - "fonts", - "img", - "js", - "serviceworker.js", - "vendor", -} - -// Custom implements the static handler for serving custom assets. -func Custom(opts *Options) func(next http.Handler) http.Handler { - return opts.staticHandler(path.Join(setting.CustomPath, "public")) -} - -// staticFileSystem implements http.FileSystem interface. -type staticFileSystem struct { - dir *http.Dir -} - -func newStaticFileSystem(directory string) staticFileSystem { - if !filepath.IsAbs(directory) { - directory = filepath.Join(setting.AppWorkPath, directory) +// AssetsHandler implements the static handler for serving custom or original assets. 
+func AssetsHandler(opts *Options) func(next http.Handler) http.Handler { + var custPath = filepath.Join(setting.CustomPath, "public") + if !filepath.IsAbs(custPath) { + custPath = filepath.Join(setting.AppWorkPath, custPath) + } + if !filepath.IsAbs(opts.Directory) { + opts.Directory = filepath.Join(setting.AppWorkPath, opts.Directory) + } + if !strings.HasSuffix(opts.Prefix, "/") { + opts.Prefix += "/" } - dir := http.Dir(directory) - return staticFileSystem{&dir} -} -func (fs staticFileSystem) Open(name string) (http.File, error) { - return fs.dir.Open(name) -} + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + if !strings.HasPrefix(req.URL.Path, opts.Prefix) { + next.ServeHTTP(resp, req) + return + } + if req.Method != "GET" && req.Method != "HEAD" { + resp.WriteHeader(http.StatusNotFound) + return + } -// StaticHandler sets up a new middleware for serving static files in the -func StaticHandler(dir string, opts *Options) func(next http.Handler) http.Handler { - return opts.staticHandler(dir) -} + file := req.URL.Path + file = file[len(opts.Prefix):] + if len(file) == 0 { + resp.WriteHeader(http.StatusNotFound) + return + } + if strings.Contains(file, "\\") { + resp.WriteHeader(http.StatusBadRequest) + return + } + file = "/" + file + + var written bool + if opts.CorsHandler != nil { + written = true + opts.CorsHandler(http.HandlerFunc(func(http.ResponseWriter, *http.Request) { + written = false + })).ServeHTTP(resp, req) + } + if written { + return + } -func (opts *Options) staticHandler(dir string) func(next http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - // Defaults - if len(opts.IndexFile) == 0 { - opts.IndexFile = "index.html" - } - // Normalize the prefix if provided - if opts.Prefix != "" { - // Ensure we have a leading '/' - if opts.Prefix[0] != '/' { - opts.Prefix = "/" + opts.Prefix + // custom files + if opts.handle(resp, req, http.Dir(custPath), file) { + return } - // Remove any trailing '/' - opts.Prefix = strings.TrimRight(opts.Prefix, "/") - } - if opts.FileSystem == nil { - opts.FileSystem = newStaticFileSystem(dir) - } - return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - if !opts.handle(w, req, opts) { - next.ServeHTTP(w, req) + // internal files + if opts.handle(resp, req, fileSystem(opts.Directory), file) { + return } + + resp.WriteHeader(http.StatusNotFound) }) } } @@ -98,76 +95,36 @@ func parseAcceptEncoding(val string) map[string]bool { return types } -func (opts *Options) handle(w http.ResponseWriter, req *http.Request, opt *Options) bool { - if req.Method != "GET" && req.Method != "HEAD" { - return false - } - - file := req.URL.Path - // if we have a prefix, filter requests by stripping the prefix - if opt.Prefix != "" { - if !strings.HasPrefix(file, opt.Prefix) { - return false - } - file = file[len(opt.Prefix):] - if file != "" && file[0] != '/' { - return false - } - } - - f, err := opt.FileSystem.Open(file) +func (opts *Options) handle(w http.ResponseWriter, req *http.Request, fs http.FileSystem, file string) bool { + // use clean to keep the file is a valid path with no . or .. 
+ f, err := fs.Open(path.Clean(file)) if err != nil { - // 404 requests to any known entries in `public` - if path.Base(opts.Directory) == "public" { - parts := strings.Split(file, "/") - if len(parts) < 2 { - return false - } - for _, entry := range KnownPublicEntries { - if entry == parts[1] { - w.WriteHeader(404) - return true - } - } + if os.IsNotExist(err) { + return false } - return false + w.WriteHeader(http.StatusInternalServerError) + log.Error("[Static] Open %q failed: %v", file, err) + return true } defer f.Close() fi, err := f.Stat() if err != nil { - log.Printf("[Static] %q exists, but fails to open: %v", file, err) + w.WriteHeader(http.StatusInternalServerError) + log.Error("[Static] %q exists, but fails to open: %v", file, err) return true } // Try to serve index file if fi.IsDir() { - // Redirect if missing trailing slash. - if !strings.HasSuffix(req.URL.Path, "/") { - http.Redirect(w, req, path.Clean(req.URL.Path+"/"), http.StatusFound) - return true - } - - f, err = opt.FileSystem.Open(file) - if err != nil { - return false // Discard error. - } - defer f.Close() - - fi, err = f.Stat() - if err != nil || fi.IsDir() { - return false - } - } - - if !opt.SkipLogging { - log.Println("[Static] Serving " + file) + w.WriteHeader(http.StatusNotFound) + return true } if httpcache.HandleFileETagCache(req, w, fi) { return true } - ServeContent(w, req, fi, fi.ModTime(), f) + serveContent(w, req, fi, fi.ModTime(), f) return true } diff --git a/modules/public/static.go b/modules/public/static.go index 36cfdbe44f32..827dc2a1e0e8 100644 --- a/modules/public/static.go +++ b/modules/public/static.go @@ -20,12 +20,8 @@ import ( "code.gitea.io/gitea/modules/log" ) -// Static implements the static handler for serving assets. -func Static(opts *Options) func(next http.Handler) http.Handler { - opts.FileSystem = Assets - // we don't need to pass the directory, because the directory var is only - // used when in the options there is no FileSystem. 
- return opts.staticHandler("") +func fileSystem(dir string) http.FileSystem { + return Assets } func Asset(name string) ([]byte, error) { @@ -59,8 +55,8 @@ func AssetIsDir(name string) (bool, error) { } } -// ServeContent serve http content -func ServeContent(w http.ResponseWriter, req *http.Request, fi os.FileInfo, modtime time.Time, content io.ReadSeeker) { +// serveContent serve http content +func serveContent(w http.ResponseWriter, req *http.Request, fi os.FileInfo, modtime time.Time, content io.ReadSeeker) { encodings := parseAcceptEncoding(req.Header.Get("Accept-Encoding")) if encodings["gzip"] { if cf, ok := fi.(*vfsgen۰CompressedFileInfo); ok { @@ -76,7 +72,7 @@ func ServeContent(w http.ResponseWriter, req *http.Request, fi os.FileInfo, modt _, err := rd.Seek(0, io.SeekStart) // rewind to output whole file if err != nil { log.Error("rd.Seek error: %v", err) - http.Error(w, http.StatusText(500), 500) + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) return } } diff --git a/modules/queue/bytefifo.go b/modules/queue/bytefifo.go index 94478e6f05c4..3a10c8e1259c 100644 --- a/modules/queue/bytefifo.go +++ b/modules/queue/bytefifo.go @@ -4,14 +4,16 @@ package queue +import "context" + // ByteFIFO defines a FIFO that takes a byte array type ByteFIFO interface { // Len returns the length of the fifo - Len() int64 + Len(ctx context.Context) int64 // PushFunc pushes data to the end of the fifo and calls the callback if it is added - PushFunc(data []byte, fn func() error) error + PushFunc(ctx context.Context, data []byte, fn func() error) error // Pop pops data from the start of the fifo - Pop() ([]byte, error) + Pop(ctx context.Context) ([]byte, error) // Close this fifo Close() error } @@ -20,7 +22,7 @@ type ByteFIFO interface { type UniqueByteFIFO interface { ByteFIFO // Has returns whether the fifo contains this data - Has(data []byte) (bool, error) + Has(ctx context.Context, data []byte) (bool, error) } var _ ByteFIFO = &DummyByteFIFO{} @@ -29,12 +31,12 @@ var _ ByteFIFO = &DummyByteFIFO{} type DummyByteFIFO struct{} // PushFunc returns nil -func (*DummyByteFIFO) PushFunc(data []byte, fn func() error) error { +func (*DummyByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error { return nil } // Pop returns nil -func (*DummyByteFIFO) Pop() ([]byte, error) { +func (*DummyByteFIFO) Pop(ctx context.Context) ([]byte, error) { return []byte{}, nil } @@ -44,7 +46,7 @@ func (*DummyByteFIFO) Close() error { } // Len is always 0 -func (*DummyByteFIFO) Len() int64 { +func (*DummyByteFIFO) Len(ctx context.Context) int64 { return 0 } @@ -56,6 +58,6 @@ type DummyUniqueByteFIFO struct { } // Has always returns false -func (*DummyUniqueByteFIFO) Has([]byte) (bool, error) { +func (*DummyUniqueByteFIFO) Has(ctx context.Context, data []byte) (bool, error) { return false, nil } diff --git a/modules/queue/manager.go b/modules/queue/manager.go index da0fc606e6e1..a6d48575ab67 100644 --- a/modules/queue/manager.go +++ b/modules/queue/manager.go @@ -187,28 +187,31 @@ func (m *Manager) FlushAll(baseCtx context.Context, timeout time.Duration) error if flushable, ok := mq.Managed.(Flushable); ok { log.Debug("Flushing (flushable) queue: %s", mq.Name) go func(q *ManagedQueue) { - localCtx, localCancel := context.WithCancel(ctx) - pid := q.RegisterWorkers(1, start, hasTimeout, end, localCancel, true) + localCtx, localCtxCancel := context.WithCancel(ctx) + pid := q.RegisterWorkers(1, start, hasTimeout, end, localCtxCancel, true) err := 
flushable.FlushWithContext(localCtx) if err != nil && err != ctx.Err() { cancel() } q.CancelWorkers(pid) - localCancel() + localCtxCancel() wg.Done() }(mq) } else { - log.Debug("Queue: %s is non-empty but is not flushable - adding 100 millisecond wait", mq.Name) - go func() { - <-time.After(100 * time.Millisecond) - wg.Done() - }() + log.Debug("Queue: %s is non-empty but is not flushable", mq.Name) + wg.Done() } - } if allEmpty { + log.Debug("All queues are empty") break } + // Ensure there are always at least 100ms between loops but not more if we've actually been doing some flushign + // but don't delay cancellation here. + select { + case <-ctx.Done(): + case <-time.After(100 * time.Millisecond): + } wg.Wait() } return nil diff --git a/modules/queue/queue.go b/modules/queue/queue.go index d08cba35a1ea..7159048c1168 100644 --- a/modules/queue/queue.go +++ b/modules/queue/queue.go @@ -57,7 +57,7 @@ type Named interface { // Queues will handle their own contents in the Run method type Queue interface { Flushable - Run(atShutdown, atTerminate func(context.Context, func())) + Run(atShutdown, atTerminate func(func())) Push(Data) error } @@ -74,7 +74,7 @@ type DummyQueue struct { } // Run does nothing -func (*DummyQueue) Run(_, _ func(context.Context, func())) {} +func (*DummyQueue) Run(_, _ func(func())) {} // Push fakes a push of data to the queue func (*DummyQueue) Push(Data) error { @@ -122,7 +122,7 @@ type Immediate struct { } // Run does nothing -func (*Immediate) Run(_, _ func(context.Context, func())) {} +func (*Immediate) Run(_, _ func(func())) {} // Push fakes a push of data to the queue func (q *Immediate) Push(data Data) error { diff --git a/modules/queue/queue_bytefifo.go b/modules/queue/queue_bytefifo.go index bc8607849330..3ea61aad0e4c 100644 --- a/modules/queue/queue_bytefifo.go +++ b/modules/queue/queue_bytefifo.go @@ -17,8 +17,9 @@ import ( // ByteFIFOQueueConfiguration is the configuration for a ByteFIFOQueue type ByteFIFOQueueConfiguration struct { WorkerPoolConfiguration - Workers int - Name string + Workers int + Name string + WaitOnEmpty bool } var _ Queue = &ByteFIFOQueue{} @@ -26,14 +27,18 @@ var _ Queue = &ByteFIFOQueue{} // ByteFIFOQueue is a Queue formed from a ByteFIFO and WorkerPool type ByteFIFOQueue struct { *WorkerPool - byteFIFO ByteFIFO - typ Type - closed chan struct{} - terminated chan struct{} - exemplar interface{} - workers int - name string - lock sync.Mutex + byteFIFO ByteFIFO + typ Type + shutdownCtx context.Context + shutdownCtxCancel context.CancelFunc + terminateCtx context.Context + terminateCtxCancel context.CancelFunc + exemplar interface{} + workers int + name string + lock sync.Mutex + waitOnEmpty bool + pushed chan struct{} } // NewByteFIFOQueue creates a new ByteFIFOQueue @@ -44,15 +49,22 @@ func NewByteFIFOQueue(typ Type, byteFIFO ByteFIFO, handle HandlerFunc, cfg, exem } config := configInterface.(ByteFIFOQueueConfiguration) + terminateCtx, terminateCtxCancel := context.WithCancel(context.Background()) + shutdownCtx, shutdownCtxCancel := context.WithCancel(terminateCtx) + return &ByteFIFOQueue{ - WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), - byteFIFO: byteFIFO, - typ: typ, - closed: make(chan struct{}), - terminated: make(chan struct{}), - exemplar: exemplar, - workers: config.Workers, - name: config.Name, + WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), + byteFIFO: byteFIFO, + typ: typ, + shutdownCtx: shutdownCtx, + shutdownCtxCancel: shutdownCtxCancel, + terminateCtx: terminateCtx, + 
terminateCtxCancel: terminateCtxCancel, + exemplar: exemplar, + workers: config.Workers, + name: config.Name, + waitOnEmpty: config.WaitOnEmpty, + pushed: make(chan struct{}, 1), }, nil } @@ -76,7 +88,15 @@ func (q *ByteFIFOQueue) PushFunc(data Data, fn func() error) error { if err != nil { return err } - return q.byteFIFO.PushFunc(bs, fn) + if q.waitOnEmpty { + defer func() { + select { + case q.pushed <- struct{}{}: + default: + } + }() + } + return q.byteFIFO.PushFunc(q.terminateCtx, bs, fn) } // IsEmpty checks if the queue is empty @@ -86,105 +106,160 @@ func (q *ByteFIFOQueue) IsEmpty() bool { if !q.WorkerPool.IsEmpty() { return false } - return q.byteFIFO.Len() == 0 + return q.byteFIFO.Len(q.terminateCtx) == 0 } // Run runs the bytefifo queue -func (q *ByteFIFOQueue) Run(atShutdown, atTerminate func(context.Context, func())) { - atShutdown(context.Background(), q.Shutdown) - atTerminate(context.Background(), q.Terminate) +func (q *ByteFIFOQueue) Run(atShutdown, atTerminate func(func())) { + atShutdown(q.Shutdown) + atTerminate(q.Terminate) log.Debug("%s: %s Starting", q.typ, q.name) - go func() { - _ = q.AddWorkers(q.workers, 0) - }() + _ = q.AddWorkers(q.workers, 0) - go q.readToChan() + log.Trace("%s: %s Now running", q.typ, q.name) + q.readToChan() - log.Trace("%s: %s Waiting til closed", q.typ, q.name) - <-q.closed + <-q.shutdownCtx.Done() log.Trace("%s: %s Waiting til done", q.typ, q.name) q.Wait() log.Trace("%s: %s Waiting til cleaned", q.typ, q.name) - ctx, cancel := context.WithCancel(context.Background()) - atTerminate(ctx, cancel) - q.CleanUp(ctx) - cancel() + q.CleanUp(q.terminateCtx) + q.terminateCtxCancel() } +const maxBackOffTime = time.Second * 3 + func (q *ByteFIFOQueue) readToChan() { + // handle quick cancels + select { + case <-q.shutdownCtx.Done(): + // tell the pool to shutdown. + q.baseCtxCancel() + return + default: + } + + // Default backoff values + backOffTime := time.Millisecond * 100 + +loop: for { - select { - case <-q.closed: - // tell the pool to shutdown. 
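The queue being rebuilt above hangs its lifecycle off two nested contexts (terminate is the parent, so terminating also shuts down) and signals "something was pushed" through a one-slot channel, letting an idle reader wake without the writer ever blocking. A stripped-down sketch of just that wiring, with illustrative names and none of the worker-pool machinery:

package queuesketch

import "context"

// lifecycle holds the two cancellation levels plus the wake-up channel.
type lifecycle struct {
	shutdownCtx        context.Context
	shutdownCtxCancel  context.CancelFunc
	terminateCtx       context.Context
	terminateCtxCancel context.CancelFunc
	pushed             chan struct{} // capacity 1: any number of notifications coalesce
}

func newLifecycle() *lifecycle {
	terminateCtx, terminateCtxCancel := context.WithCancel(context.Background())
	shutdownCtx, shutdownCtxCancel := context.WithCancel(terminateCtx)
	return &lifecycle{
		shutdownCtx:        shutdownCtx,
		shutdownCtxCancel:  shutdownCtxCancel,
		terminateCtx:       terminateCtx,
		terminateCtxCancel: terminateCtxCancel,
		pushed:             make(chan struct{}, 1),
	}
}

// notifyPush never blocks: if a wake-up is already pending, the new one is
// folded into it, which is all an empty-queue waiter needs.
func (l *lifecycle) notifyPush() {
	select {
	case l.pushed <- struct{}{}:
	default:
	}
}

// waitForPush blocks until something is pushed or the queue is shut down.
func (l *lifecycle) waitForPush() (shutdown bool) {
	select {
	case <-l.pushed:
		return false
	case <-l.shutdownCtx.Done():
		return true
	}
}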
- q.cancel() - return - default: - q.lock.Lock() - bs, err := q.byteFIFO.Pop() - if err != nil { - q.lock.Unlock() - log.Error("%s: %s Error on Pop: %v", q.typ, q.name, err) - time.Sleep(time.Millisecond * 100) - continue + err := q.doPop() + if err == errQueueEmpty { + log.Trace("%s: %s Waiting on Empty", q.typ, q.name) + select { + case <-q.pushed: + // reset backOffTime + backOffTime = 100 * time.Millisecond + continue loop + case <-q.shutdownCtx.Done(): + // Oops we've been shutdown whilst waiting + // Make sure the worker pool is shutdown too + q.baseCtxCancel() + return } + } - if len(bs) == 0 { - q.lock.Unlock() - time.Sleep(time.Millisecond * 100) - continue - } + // Reset the backOffTime if there is no error or an unmarshalError + if err == nil || err == errUnmarshal { + backOffTime = 100 * time.Millisecond + } - data, err := unmarshalAs(bs, q.exemplar) - if err != nil { - log.Error("%s: %s Failed to unmarshal with error: %v", q.typ, q.name, err) - q.lock.Unlock() - time.Sleep(time.Millisecond * 100) - continue + if err != nil { + // Need to Backoff + select { + case <-q.shutdownCtx.Done(): + // Oops we've been shutdown whilst backing off + // Make sure the worker pool is shutdown too + q.baseCtxCancel() + return + case <-time.After(backOffTime): + // OK we've waited - so backoff a bit + backOffTime += backOffTime / 2 + if backOffTime > maxBackOffTime { + backOffTime = maxBackOffTime + } + continue loop } + } + select { + case <-q.shutdownCtx.Done(): + // Oops we've been shutdown + // Make sure the worker pool is shutdown too + q.baseCtxCancel() + return + default: + continue loop + } + } +} + +var errQueueEmpty = fmt.Errorf("empty queue") +var errEmptyBytes = fmt.Errorf("empty bytes") +var errUnmarshal = fmt.Errorf("failed to unmarshal") - log.Trace("%s %s: Task found: %#v", q.typ, q.name, data) - q.WorkerPool.Push(data) - q.lock.Unlock() +func (q *ByteFIFOQueue) doPop() error { + q.lock.Lock() + defer q.lock.Unlock() + bs, err := q.byteFIFO.Pop(q.shutdownCtx) + if err != nil { + if err == context.Canceled { + q.baseCtxCancel() + return err + } + log.Error("%s: %s Error on Pop: %v", q.typ, q.name, err) + return err + } + if len(bs) == 0 { + if q.waitOnEmpty && q.byteFIFO.Len(q.shutdownCtx) == 0 { + return errQueueEmpty } + return errEmptyBytes + } + + data, err := unmarshalAs(bs, q.exemplar) + if err != nil { + log.Error("%s: %s Failed to unmarshal with error: %v", q.typ, q.name, err) + return errUnmarshal } + + log.Trace("%s %s: Task found: %#v", q.typ, q.name, data) + q.WorkerPool.Push(data) + return nil } // Shutdown processing from this queue func (q *ByteFIFOQueue) Shutdown() { log.Trace("%s: %s Shutting down", q.typ, q.name) - q.lock.Lock() select { - case <-q.closed: + case <-q.shutdownCtx.Done(): + return default: - close(q.closed) } - q.lock.Unlock() + q.shutdownCtxCancel() log.Debug("%s: %s Shutdown", q.typ, q.name) } // IsShutdown returns a channel which is closed when this Queue is shutdown func (q *ByteFIFOQueue) IsShutdown() <-chan struct{} { - return q.closed + return q.shutdownCtx.Done() } // Terminate this queue and close the queue func (q *ByteFIFOQueue) Terminate() { log.Trace("%s: %s Terminating", q.typ, q.name) q.Shutdown() - q.lock.Lock() select { - case <-q.terminated: - q.lock.Unlock() + case <-q.terminateCtx.Done(): return default: } - close(q.terminated) - q.lock.Unlock() if log.IsDebug() { - log.Debug("%s: %s Closing with %d tasks left in queue", q.typ, q.name, q.byteFIFO.Len()) + log.Debug("%s: %s Closing with %d tasks left in queue", q.typ, q.name, 
q.byteFIFO.Len(q.terminateCtx)) } + q.terminateCtxCancel() if err := q.byteFIFO.Close(); err != nil { log.Error("Error whilst closing internal byte fifo in %s: %s: %v", q.typ, q.name, err) } @@ -193,7 +268,7 @@ func (q *ByteFIFOQueue) Terminate() { // IsTerminated returns a channel which is closed when this Queue is terminated func (q *ByteFIFOQueue) IsTerminated() <-chan struct{} { - return q.terminated + return q.terminateCtx.Done() } var _ UniqueQueue = &ByteFIFOUniqueQueue{} @@ -210,17 +285,21 @@ func NewByteFIFOUniqueQueue(typ Type, byteFIFO UniqueByteFIFO, handle HandlerFun return nil, err } config := configInterface.(ByteFIFOQueueConfiguration) + terminateCtx, terminateCtxCancel := context.WithCancel(context.Background()) + shutdownCtx, shutdownCtxCancel := context.WithCancel(terminateCtx) return &ByteFIFOUniqueQueue{ ByteFIFOQueue: ByteFIFOQueue{ - WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), - byteFIFO: byteFIFO, - typ: typ, - closed: make(chan struct{}), - terminated: make(chan struct{}), - exemplar: exemplar, - workers: config.Workers, - name: config.Name, + WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), + byteFIFO: byteFIFO, + typ: typ, + shutdownCtx: shutdownCtx, + shutdownCtxCancel: shutdownCtxCancel, + terminateCtx: terminateCtx, + terminateCtxCancel: terminateCtxCancel, + exemplar: exemplar, + workers: config.Workers, + name: config.Name, }, }, nil } @@ -235,5 +314,5 @@ func (q *ByteFIFOUniqueQueue) Has(data Data) (bool, error) { if err != nil { return false, err } - return q.byteFIFO.(UniqueByteFIFO).Has(bs) + return q.byteFIFO.(UniqueByteFIFO).Has(q.terminateCtx, bs) } diff --git a/modules/queue/queue_channel.go b/modules/queue/queue_channel.go index d7a11e79f5dc..4df64b69ee5e 100644 --- a/modules/queue/queue_channel.go +++ b/modules/queue/queue_channel.go @@ -27,9 +27,13 @@ type ChannelQueueConfiguration struct { // It is basically a very thin wrapper around a WorkerPool type ChannelQueue struct { *WorkerPool - exemplar interface{} - workers int - name string + shutdownCtx context.Context + shutdownCtxCancel context.CancelFunc + terminateCtx context.Context + terminateCtxCancel context.CancelFunc + exemplar interface{} + workers int + name string } // NewChannelQueue creates a memory channel queue @@ -42,28 +46,30 @@ func NewChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, erro if config.BatchLength == 0 { config.BatchLength = 1 } + + terminateCtx, terminateCtxCancel := context.WithCancel(context.Background()) + shutdownCtx, shutdownCtxCancel := context.WithCancel(terminateCtx) + queue := &ChannelQueue{ - WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), - exemplar: exemplar, - workers: config.Workers, - name: config.Name, + WorkerPool: NewWorkerPool(handle, config.WorkerPoolConfiguration), + shutdownCtx: shutdownCtx, + shutdownCtxCancel: shutdownCtxCancel, + terminateCtx: terminateCtx, + terminateCtxCancel: terminateCtxCancel, + exemplar: exemplar, + workers: config.Workers, + name: config.Name, } queue.qid = GetManager().Add(queue, ChannelQueueType, config, exemplar) return queue, nil } // Run starts to run the queue -func (q *ChannelQueue) Run(atShutdown, atTerminate func(context.Context, func())) { - atShutdown(context.Background(), func() { - log.Warn("ChannelQueue: %s is not shutdownable!", q.name) - }) - atTerminate(context.Background(), func() { - log.Warn("ChannelQueue: %s is not terminatable!", q.name) - }) +func (q *ChannelQueue) Run(atShutdown, atTerminate func(func())) { + 
atShutdown(q.Shutdown) + atTerminate(q.Terminate) log.Debug("ChannelQueue: %s Starting", q.name) - go func() { - _ = q.AddWorkers(q.workers, 0) - }() + _ = q.AddWorkers(q.workers, 0) } // Push will push data into the queue @@ -75,6 +81,42 @@ func (q *ChannelQueue) Push(data Data) error { return nil } +// Shutdown processing from this queue +func (q *ChannelQueue) Shutdown() { + q.lock.Lock() + defer q.lock.Unlock() + select { + case <-q.shutdownCtx.Done(): + log.Trace("ChannelQueue: %s Already Shutting down", q.name) + return + default: + } + log.Trace("ChannelQueue: %s Shutting down", q.name) + go func() { + log.Trace("ChannelQueue: %s Flushing", q.name) + if err := q.FlushWithContext(q.terminateCtx); err != nil { + log.Warn("ChannelQueue: %s Terminated before completed flushing", q.name) + return + } + log.Debug("ChannelQueue: %s Flushed", q.name) + }() + q.shutdownCtxCancel() + log.Debug("ChannelQueue: %s Shutdown", q.name) +} + +// Terminate this queue and close the queue +func (q *ChannelQueue) Terminate() { + log.Trace("ChannelQueue: %s Terminating", q.name) + q.Shutdown() + select { + case <-q.terminateCtx.Done(): + return + default: + } + q.terminateCtxCancel() + log.Debug("ChannelQueue: %s Terminated", q.name) +} + // Name returns the name of this queue func (q *ChannelQueue) Name() string { return q.name diff --git a/modules/queue/queue_channel_test.go b/modules/queue/queue_channel_test.go index 08a64c0ab86c..f1ddd7ec9210 100644 --- a/modules/queue/queue_channel_test.go +++ b/modules/queue/queue_channel_test.go @@ -5,7 +5,6 @@ package queue import ( - "context" "testing" "time" @@ -21,7 +20,7 @@ func TestChannelQueue(t *testing.T) { } } - nilFn := func(_ context.Context, _ func()) {} + nilFn := func(_ func()) {} queue, err := NewChannelQueue(handle, ChannelQueueConfiguration{ @@ -37,7 +36,7 @@ func TestChannelQueue(t *testing.T) { }, &testData{}) assert.NoError(t, err) - assert.Equal(t, queue.(*ChannelQueue).WorkerPool.boostWorkers, 5) + assert.Equal(t, 5, queue.(*ChannelQueue).WorkerPool.boostWorkers) go queue.Run(nilFn, nilFn) @@ -61,16 +60,16 @@ func TestChannelQueue_Batch(t *testing.T) { } } - nilFn := func(_ context.Context, _ func()) {} + nilFn := func(_ func()) {} queue, err := NewChannelQueue(handle, ChannelQueueConfiguration{ WorkerPoolConfiguration: WorkerPoolConfiguration{ QueueLength: 20, BatchLength: 2, - BlockTimeout: 1 * time.Second, - BoostTimeout: 5 * time.Minute, - BoostWorkers: 5, + BlockTimeout: 0, + BoostTimeout: 0, + BoostWorkers: 0, MaxWorkers: 10, }, Workers: 1, diff --git a/modules/queue/queue_disk.go b/modules/queue/queue_disk.go index 6c15a8e63be2..911233a5d9a0 100644 --- a/modules/queue/queue_disk.go +++ b/modules/queue/queue_disk.go @@ -5,6 +5,8 @@ package queue import ( + "context" + "code.gitea.io/gitea/modules/nosql" "gitea.com/lunny/levelqueue" @@ -37,6 +39,7 @@ func NewLevelQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, error) if len(config.ConnectionString) == 0 { config.ConnectionString = config.DataDir } + config.WaitOnEmpty = true byteFIFO, err := NewLevelQueueByteFIFO(config.ConnectionString, config.QueueName) if err != nil { @@ -82,7 +85,7 @@ func NewLevelQueueByteFIFO(connection, prefix string) (*LevelQueueByteFIFO, erro } // PushFunc will push data into the fifo -func (fifo *LevelQueueByteFIFO) PushFunc(data []byte, fn func() error) error { +func (fifo *LevelQueueByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error { if fn != nil { if err := fn(); err != nil { return err @@ -92,7 +95,7 @@ func (fifo 
*LevelQueueByteFIFO) PushFunc(data []byte, fn func() error) error { } // Pop pops data from the start of the fifo -func (fifo *LevelQueueByteFIFO) Pop() ([]byte, error) { +func (fifo *LevelQueueByteFIFO) Pop(ctx context.Context) ([]byte, error) { data, err := fifo.internal.RPop() if err != nil && err != levelqueue.ErrNotFound { return nil, err @@ -108,7 +111,7 @@ func (fifo *LevelQueueByteFIFO) Close() error { } // Len returns the length of the fifo -func (fifo *LevelQueueByteFIFO) Len() int64 { +func (fifo *LevelQueueByteFIFO) Len(ctx context.Context) int64 { return fifo.internal.Len() } diff --git a/modules/queue/queue_disk_channel.go b/modules/queue/queue_disk_channel.go index 801fd8a12235..c3a1c5781ef0 100644 --- a/modules/queue/queue_disk_channel.go +++ b/modules/queue/queue_disk_channel.go @@ -133,8 +133,9 @@ func (q *PersistableChannelQueue) Push(data Data) error { } // Run starts to run the queue -func (q *PersistableChannelQueue) Run(atShutdown, atTerminate func(context.Context, func())) { +func (q *PersistableChannelQueue) Run(atShutdown, atTerminate func(func())) { log.Debug("PersistableChannelQueue: %s Starting", q.delayedStarter.name) + _ = q.channelQueue.AddWorkers(q.channelQueue.workers, 0) q.lock.Lock() if q.internal == nil { @@ -147,34 +148,32 @@ func (q *PersistableChannelQueue) Run(atShutdown, atTerminate func(context.Conte } else { q.lock.Unlock() } - atShutdown(context.Background(), q.Shutdown) - atTerminate(context.Background(), q.Terminate) + atShutdown(q.Shutdown) + atTerminate(q.Terminate) - // Just run the level queue - we shut it down later - go q.internal.Run(func(_ context.Context, _ func()) {}, func(_ context.Context, _ func()) {}) - - go func() { - _ = q.channelQueue.AddWorkers(q.channelQueue.workers, 0) - }() + if lq, ok := q.internal.(*LevelQueue); ok && lq.byteFIFO.Len(lq.shutdownCtx) != 0 { + // Just run the level queue - we shut it down once it's flushed + go q.internal.Run(func(_ func()) {}, func(_ func()) {}) + go func() { + for !q.IsEmpty() { + _ = q.internal.Flush(0) + select { + case <-time.After(100 * time.Millisecond): + case <-q.internal.(*LevelQueue).shutdownCtx.Done(): + log.Warn("LevelQueue: %s shut down before completely flushed", q.internal.(*LevelQueue).Name()) + return + } + } + log.Debug("LevelQueue: %s flushed so shutting down", q.internal.(*LevelQueue).Name()) + q.internal.(*LevelQueue).Shutdown() + GetManager().Remove(q.internal.(*LevelQueue).qid) + }() + } else { + log.Debug("PersistableChannelQueue: %s Skipping running the empty level queue", q.delayedStarter.name) + q.internal.(*LevelQueue).Shutdown() + GetManager().Remove(q.internal.(*LevelQueue).qid) + } - log.Trace("PersistableChannelQueue: %s Waiting til closed", q.delayedStarter.name) - <-q.closed - log.Trace("PersistableChannelQueue: %s Cancelling pools", q.delayedStarter.name) - q.channelQueue.cancel() - q.internal.(*LevelQueue).cancel() - log.Trace("PersistableChannelQueue: %s Waiting til done", q.delayedStarter.name) - q.channelQueue.Wait() - q.internal.(*LevelQueue).Wait() - // Redirect all remaining data in the chan to the internal channel - go func() { - log.Trace("PersistableChannelQueue: %s Redirecting remaining data", q.delayedStarter.name) - for data := range q.channelQueue.dataChan { - _ = q.internal.Push(data) - atomic.AddInt64(&q.channelQueue.numInQueue, -1) - } - log.Trace("PersistableChannelQueue: %s Done Redirecting remaining data", q.delayedStarter.name) - }() - log.Trace("PersistableChannelQueue: %s Done main loop", q.delayedStarter.name) } // Flush flushes 
the queue and blocks till the queue is empty @@ -232,16 +231,37 @@ func (q *PersistableChannelQueue) IsEmpty() bool { func (q *PersistableChannelQueue) Shutdown() { log.Trace("PersistableChannelQueue: %s Shutting down", q.delayedStarter.name) q.lock.Lock() - defer q.lock.Unlock() + select { case <-q.closed: + q.lock.Unlock() + return default: - if q.internal != nil { - q.internal.(*LevelQueue).Shutdown() - } - close(q.closed) - log.Debug("PersistableChannelQueue: %s Shutdown", q.delayedStarter.name) } + q.channelQueue.Shutdown() + if q.internal != nil { + q.internal.(*LevelQueue).Shutdown() + } + close(q.closed) + q.lock.Unlock() + + log.Trace("PersistableChannelQueue: %s Cancelling pools", q.delayedStarter.name) + q.channelQueue.baseCtxCancel() + q.internal.(*LevelQueue).baseCtxCancel() + log.Trace("PersistableChannelQueue: %s Waiting til done", q.delayedStarter.name) + q.channelQueue.Wait() + q.internal.(*LevelQueue).Wait() + // Redirect all remaining data in the chan to the internal channel + go func() { + log.Trace("PersistableChannelQueue: %s Redirecting remaining data", q.delayedStarter.name) + for data := range q.channelQueue.dataChan { + _ = q.internal.Push(data) + atomic.AddInt64(&q.channelQueue.numInQueue, -1) + } + log.Trace("PersistableChannelQueue: %s Done Redirecting remaining data", q.delayedStarter.name) + }() + + log.Debug("PersistableChannelQueue: %s Shutdown", q.delayedStarter.name) } // Terminate this queue and close the queue @@ -250,6 +270,7 @@ func (q *PersistableChannelQueue) Terminate() { q.Shutdown() q.lock.Lock() defer q.lock.Unlock() + q.channelQueue.Terminate() if q.internal != nil { q.internal.(*LevelQueue).Terminate() } diff --git a/modules/queue/queue_disk_channel_test.go b/modules/queue/queue_disk_channel_test.go index 93061bffc658..561f98ca907b 100644 --- a/modules/queue/queue_disk_channel_test.go +++ b/modules/queue/queue_disk_channel_test.go @@ -5,10 +5,8 @@ package queue import ( - "context" "io/ioutil" "testing" - "time" "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" @@ -32,17 +30,19 @@ func TestPersistableChannelQueue(t *testing.T) { defer util.RemoveAll(tmpDir) queue, err := NewPersistableChannelQueue(handle, PersistableChannelQueueConfiguration{ - DataDir: tmpDir, - BatchLength: 2, - QueueLength: 20, - Workers: 1, - MaxWorkers: 10, + DataDir: tmpDir, + BatchLength: 2, + QueueLength: 20, + Workers: 1, + BoostWorkers: 0, + MaxWorkers: 10, + Name: "first", }, &testData{}) assert.NoError(t, err) - go queue.Run(func(_ context.Context, shutdown func()) { + go queue.Run(func(shutdown func()) { queueShutdown = append(queueShutdown, shutdown) - }, func(_ context.Context, terminate func()) { + }, func(terminate func()) { queueTerminate = append(queueTerminate, terminate) }) @@ -64,13 +64,18 @@ func TestPersistableChannelQueue(t *testing.T) { assert.Equal(t, test2.TestString, result2.TestString) assert.Equal(t, test2.TestInt, result2.TestInt) + // test1 is a testData not a *testData so will be rejected err = queue.Push(test1) assert.Error(t, err) + // Now shutdown the queue for _, callback := range queueShutdown { callback() } - time.Sleep(200 * time.Millisecond) + + // Wait til it is closed + <-queue.(*PersistableChannelQueue).closed + err = queue.Push(&test1) assert.NoError(t, err) err = queue.Push(&test2) @@ -80,23 +85,33 @@ func TestPersistableChannelQueue(t *testing.T) { assert.Fail(t, "Handler processing should have stopped") default: } + + // terminate the queue for _, callback := range queueTerminate { callback() } + select { + 
case <-handleChan: + assert.Fail(t, "Handler processing should have stopped") + default: + } + // Reopen queue queue, err = NewPersistableChannelQueue(handle, PersistableChannelQueueConfiguration{ - DataDir: tmpDir, - BatchLength: 2, - QueueLength: 20, - Workers: 1, - MaxWorkers: 10, + DataDir: tmpDir, + BatchLength: 2, + QueueLength: 20, + Workers: 1, + BoostWorkers: 0, + MaxWorkers: 10, + Name: "second", }, &testData{}) assert.NoError(t, err) - go queue.Run(func(_ context.Context, shutdown func()) { + go queue.Run(func(shutdown func()) { queueShutdown = append(queueShutdown, shutdown) - }, func(_ context.Context, terminate func()) { + }, func(terminate func()) { queueTerminate = append(queueTerminate, terminate) }) diff --git a/modules/queue/queue_disk_test.go b/modules/queue/queue_disk_test.go index edaed49a5239..1f884d4f8d76 100644 --- a/modules/queue/queue_disk_test.go +++ b/modules/queue/queue_disk_test.go @@ -5,7 +5,6 @@ package queue import ( - "context" "io/ioutil" "sync" "testing" @@ -49,11 +48,11 @@ func TestLevelQueue(t *testing.T) { }, &testData{}) assert.NoError(t, err) - go queue.Run(func(_ context.Context, shutdown func()) { + go queue.Run(func(shutdown func()) { lock.Lock() queueShutdown = append(queueShutdown, shutdown) lock.Unlock() - }, func(_ context.Context, terminate func()) { + }, func(terminate func()) { lock.Lock() queueTerminate = append(queueTerminate, terminate) lock.Unlock() @@ -123,11 +122,11 @@ func TestLevelQueue(t *testing.T) { }, &testData{}) assert.NoError(t, err) - go queue.Run(func(_ context.Context, shutdown func()) { + go queue.Run(func(shutdown func()) { lock.Lock() queueShutdown = append(queueShutdown, shutdown) lock.Unlock() - }, func(_ context.Context, terminate func()) { + }, func(terminate func()) { lock.Lock() queueTerminate = append(queueTerminate, terminate) lock.Unlock() diff --git a/modules/queue/queue_redis.go b/modules/queue/queue_redis.go index af2cc30335b7..a5fb866dc1e1 100644 --- a/modules/queue/queue_redis.go +++ b/modules/queue/queue_redis.go @@ -6,7 +6,6 @@ package queue import ( "context" - "fmt" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" @@ -47,8 +46,6 @@ func NewRedisQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, error) return nil, err } - byteFIFO.ctx = graceful.NewChannelContext(byteFIFOQueue.IsTerminated(), fmt.Errorf("queue has been terminated")) - queue := &RedisQueue{ ByteFIFOQueue: byteFIFOQueue, } @@ -73,8 +70,8 @@ var _ ByteFIFO = &RedisByteFIFO{} // RedisByteFIFO represents a ByteFIFO formed from a redisClient type RedisByteFIFO struct { - ctx context.Context - client redisClient + client redisClient + queueName string } @@ -89,7 +86,6 @@ func NewRedisByteFIFO(config RedisByteFIFOConfiguration) (*RedisByteFIFO, error) fifo := &RedisByteFIFO{ queueName: config.QueueName, } - fifo.ctx = graceful.GetManager().TerminateContext() fifo.client = nosql.GetManager().GetRedisClient(config.ConnectionString) if err := fifo.client.Ping(graceful.GetManager().ShutdownContext()).Err(); err != nil { return nil, err @@ -98,18 +94,18 @@ func NewRedisByteFIFO(config RedisByteFIFOConfiguration) (*RedisByteFIFO, error) } // PushFunc pushes data to the end of the fifo and calls the callback if it is added -func (fifo *RedisByteFIFO) PushFunc(data []byte, fn func() error) error { +func (fifo *RedisByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error { if fn != nil { if err := fn(); err != nil { return err } } - return fifo.client.RPush(fifo.ctx, fifo.queueName, data).Err() + 
return fifo.client.RPush(ctx, fifo.queueName, data).Err() } // Pop pops data from the start of the fifo -func (fifo *RedisByteFIFO) Pop() ([]byte, error) { - data, err := fifo.client.LPop(fifo.ctx, fifo.queueName).Bytes() +func (fifo *RedisByteFIFO) Pop(ctx context.Context) ([]byte, error) { + data, err := fifo.client.LPop(ctx, fifo.queueName).Bytes() if err == nil || err == redis.Nil { return data, nil } @@ -122,8 +118,8 @@ func (fifo *RedisByteFIFO) Close() error { } // Len returns the length of the fifo -func (fifo *RedisByteFIFO) Len() int64 { - val, err := fifo.client.LLen(fifo.ctx, fifo.queueName).Result() +func (fifo *RedisByteFIFO) Len(ctx context.Context) int64 { + val, err := fifo.client.LLen(ctx, fifo.queueName).Result() if err != nil { log.Error("Error whilst getting length of redis queue %s: Error: %v", fifo.queueName, err) return -1 diff --git a/modules/queue/queue_wrapped.go b/modules/queue/queue_wrapped.go index 88d64e82464f..ec30ab028197 100644 --- a/modules/queue/queue_wrapped.go +++ b/modules/queue/queue_wrapped.go @@ -38,7 +38,7 @@ type delayedStarter struct { } // setInternal must be called with the lock locked. -func (q *delayedStarter) setInternal(atShutdown func(context.Context, func()), handle HandlerFunc, exemplar interface{}) error { +func (q *delayedStarter) setInternal(atShutdown func(func()), handle HandlerFunc, exemplar interface{}) error { var ctx context.Context var cancel context.CancelFunc if q.timeout > 0 { @@ -49,9 +49,7 @@ func (q *delayedStarter) setInternal(atShutdown func(context.Context, func()), h defer cancel() // Ensure we also stop at shutdown - atShutdown(ctx, func() { - cancel() - }) + atShutdown(cancel) i := 1 for q.internal == nil { @@ -221,7 +219,7 @@ func (q *WrappedQueue) IsEmpty() bool { } // Run starts to run the queue and attempts to create the internal queue -func (q *WrappedQueue) Run(atShutdown, atTerminate func(context.Context, func())) { +func (q *WrappedQueue) Run(atShutdown, atTerminate func(func())) { log.Debug("WrappedQueue: %s Starting", q.name) q.lock.Lock() if q.internal == nil { diff --git a/modules/queue/unique_queue_channel.go b/modules/queue/unique_queue_channel.go index dec1cfc5c06e..5bec67c4d355 100644 --- a/modules/queue/unique_queue_channel.go +++ b/modules/queue/unique_queue_channel.go @@ -28,11 +28,15 @@ type ChannelUniqueQueueConfiguration ChannelQueueConfiguration // only guaranteed whilst the task is waiting in the queue. 
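With a context on every ByteFIFO method, each backend is free to honour cancellation however it can (the Redis FIFO above passes ctx straight to the client; the LevelDB variant currently ignores it). For reference, a minimal in-memory implementation of the same interface shape, purely illustrative and checking ctx only on entry:

package queuesketch

import (
	"context"
	"sync"
)

// MemoryByteFIFO is a mutex-guarded, illustrative ByteFIFO.
type MemoryByteFIFO struct {
	mu   sync.Mutex
	data [][]byte
}

// PushFunc appends data after the optional callback succeeds.
func (f *MemoryByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error {
	if err := ctx.Err(); err != nil {
		return err
	}
	if fn != nil {
		if err := fn(); err != nil {
			return err
		}
	}
	f.mu.Lock()
	defer f.mu.Unlock()
	f.data = append(f.data, data)
	return nil
}

// Pop returns the oldest entry, or (nil, nil) when the fifo is empty.
func (f *MemoryByteFIFO) Pop(ctx context.Context) ([]byte, error) {
	if err := ctx.Err(); err != nil {
		return nil, err
	}
	f.mu.Lock()
	defer f.mu.Unlock()
	if len(f.data) == 0 {
		return nil, nil
	}
	bs := f.data[0]
	f.data = f.data[1:]
	return bs, nil
}

// Len reports the number of queued entries.
func (f *MemoryByteFIFO) Len(ctx context.Context) int64 {
	f.mu.Lock()
	defer f.mu.Unlock()
	return int64(len(f.data))
}

// Close has nothing to release for the in-memory case.
func (f *MemoryByteFIFO) Close() error { return nil }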
type ChannelUniqueQueue struct { *WorkerPool - lock sync.Mutex - table map[Data]bool - exemplar interface{} - workers int - name string + lock sync.Mutex + table map[Data]bool + shutdownCtx context.Context + shutdownCtxCancel context.CancelFunc + terminateCtx context.Context + terminateCtxCancel context.CancelFunc + exemplar interface{} + workers int + name string } // NewChannelUniqueQueue create a memory channel queue @@ -45,11 +49,19 @@ func NewChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue if config.BatchLength == 0 { config.BatchLength = 1 } + + terminateCtx, terminateCtxCancel := context.WithCancel(context.Background()) + shutdownCtx, shutdownCtxCancel := context.WithCancel(terminateCtx) + queue := &ChannelUniqueQueue{ - table: map[Data]bool{}, - exemplar: exemplar, - workers: config.Workers, - name: config.Name, + table: map[Data]bool{}, + shutdownCtx: shutdownCtx, + shutdownCtxCancel: shutdownCtxCancel, + terminateCtx: terminateCtx, + terminateCtxCancel: terminateCtxCancel, + exemplar: exemplar, + workers: config.Workers, + name: config.Name, } queue.WorkerPool = NewWorkerPool(func(data ...Data) { for _, datum := range data { @@ -65,17 +77,11 @@ func NewChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue } // Run starts to run the queue -func (q *ChannelUniqueQueue) Run(atShutdown, atTerminate func(context.Context, func())) { - atShutdown(context.Background(), func() { - log.Warn("ChannelUniqueQueue: %s is not shutdownable!", q.name) - }) - atTerminate(context.Background(), func() { - log.Warn("ChannelUniqueQueue: %s is not terminatable!", q.name) - }) +func (q *ChannelUniqueQueue) Run(atShutdown, atTerminate func(func())) { + atShutdown(q.Shutdown) + atTerminate(q.Terminate) log.Debug("ChannelUniqueQueue: %s Starting", q.name) - go func() { - _ = q.AddWorkers(q.workers, 0) - }() + _ = q.AddWorkers(q.workers, 0) } // Push will push data into the queue if the data is not already in the queue @@ -122,6 +128,39 @@ func (q *ChannelUniqueQueue) Has(data Data) (bool, error) { return has, nil } +// Shutdown processing from this queue +func (q *ChannelUniqueQueue) Shutdown() { + log.Trace("ChannelUniqueQueue: %s Shutting down", q.name) + select { + case <-q.shutdownCtx.Done(): + return + default: + } + go func() { + log.Trace("ChannelUniqueQueue: %s Flushing", q.name) + if err := q.FlushWithContext(q.terminateCtx); err != nil { + log.Warn("ChannelUniqueQueue: %s Terminated before completed flushing", q.name) + return + } + log.Debug("ChannelUniqueQueue: %s Flushed", q.name) + }() + q.shutdownCtxCancel() + log.Debug("ChannelUniqueQueue: %s Shutdown", q.name) +} + +// Terminate this queue and close the queue +func (q *ChannelUniqueQueue) Terminate() { + log.Trace("ChannelUniqueQueue: %s Terminating", q.name) + q.Shutdown() + select { + case <-q.terminateCtx.Done(): + return + default: + } + q.terminateCtxCancel() + log.Debug("ChannelUniqueQueue: %s Terminated", q.name) +} + // Name returns the name of this queue func (q *ChannelUniqueQueue) Name() string { return q.name diff --git a/modules/queue/unique_queue_disk.go b/modules/queue/unique_queue_disk.go index 8ec8848bc498..bb0eb7d950c5 100644 --- a/modules/queue/unique_queue_disk.go +++ b/modules/queue/unique_queue_disk.go @@ -5,6 +5,8 @@ package queue import ( + "context" + "code.gitea.io/gitea/modules/nosql" "gitea.com/lunny/levelqueue" @@ -41,6 +43,7 @@ func NewLevelUniqueQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, if len(config.ConnectionString) == 0 { 
config.ConnectionString = config.DataDir } + config.WaitOnEmpty = true byteFIFO, err := NewLevelUniqueQueueByteFIFO(config.ConnectionString, config.QueueName) if err != nil { @@ -86,12 +89,12 @@ func NewLevelUniqueQueueByteFIFO(connection, prefix string) (*LevelUniqueQueueBy } // PushFunc pushes data to the end of the fifo and calls the callback if it is added -func (fifo *LevelUniqueQueueByteFIFO) PushFunc(data []byte, fn func() error) error { +func (fifo *LevelUniqueQueueByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error { return fifo.internal.LPushFunc(data, fn) } // Pop pops data from the start of the fifo -func (fifo *LevelUniqueQueueByteFIFO) Pop() ([]byte, error) { +func (fifo *LevelUniqueQueueByteFIFO) Pop(ctx context.Context) ([]byte, error) { data, err := fifo.internal.RPop() if err != nil && err != levelqueue.ErrNotFound { return nil, err @@ -100,12 +103,12 @@ func (fifo *LevelUniqueQueueByteFIFO) Pop() ([]byte, error) { } // Len returns the length of the fifo -func (fifo *LevelUniqueQueueByteFIFO) Len() int64 { +func (fifo *LevelUniqueQueueByteFIFO) Len(ctx context.Context) int64 { return fifo.internal.Len() } // Has returns whether the fifo contains this data -func (fifo *LevelUniqueQueueByteFIFO) Has(data []byte) (bool, error) { +func (fifo *LevelUniqueQueueByteFIFO) Has(ctx context.Context, data []byte) (bool, error) { return fifo.internal.Has(data) } diff --git a/modules/queue/unique_queue_disk_channel.go b/modules/queue/unique_queue_disk_channel.go index 47c4f2bdd574..af42c0913d4d 100644 --- a/modules/queue/unique_queue_disk_channel.go +++ b/modules/queue/unique_queue_disk_channel.go @@ -36,7 +36,7 @@ type PersistableChannelUniqueQueueConfiguration struct { // task cannot be processed twice or more at the same time. Uniqueness is // only guaranteed whilst the task is waiting in the queue. type PersistableChannelUniqueQueue struct { - *ChannelUniqueQueue + channelQueue *ChannelUniqueQueue delayedStarter lock sync.Mutex closed chan struct{} @@ -85,8 +85,8 @@ func NewPersistableChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interfac } queue := &PersistableChannelUniqueQueue{ - ChannelUniqueQueue: channelUniqueQueue.(*ChannelUniqueQueue), - closed: make(chan struct{}), + channelQueue: channelUniqueQueue.(*ChannelUniqueQueue), + closed: make(chan struct{}), } levelQueue, err := NewLevelUniqueQueue(func(data ...Data) { @@ -138,14 +138,14 @@ func (q *PersistableChannelUniqueQueue) PushFunc(data Data, fn func() error) err case <-q.closed: return q.internal.(UniqueQueue).PushFunc(data, fn) default: - return q.ChannelUniqueQueue.PushFunc(data, fn) + return q.channelQueue.PushFunc(data, fn) } } // Has will test if the queue has the data func (q *PersistableChannelUniqueQueue) Has(data Data) (bool, error) { // This is more difficult... 
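// i.e. check the in-memory channel queue first; only when the item is not found there does the
// lookup fall back to the persisted level queue, so uniqueness is enforced whichever side of
// the flush boundary the task currently sits on.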
- has, err := q.ChannelUniqueQueue.Has(data) + has, err := q.channelQueue.Has(data) if err != nil || has { return has, err } @@ -158,7 +158,7 @@ func (q *PersistableChannelUniqueQueue) Has(data Data) (bool, error) { } // Run starts to run the queue -func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(context.Context, func())) { +func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(func())) { log.Debug("PersistableChannelUniqueQueue: %s Starting", q.delayedStarter.name) q.lock.Lock() @@ -170,7 +170,7 @@ func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(context log.Error("Unable push to channelled queue: %v", err) } } - }, q.exemplar) + }, q.channelQueue.exemplar) q.lock.Unlock() if err != nil { log.Fatal("Unable to create internal queue for %s Error: %v", q.Name(), err) @@ -179,53 +179,73 @@ func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(context } else { q.lock.Unlock() } - atShutdown(context.Background(), q.Shutdown) - atTerminate(context.Background(), q.Terminate) + atShutdown(q.Shutdown) + atTerminate(q.Terminate) + _ = q.channelQueue.AddWorkers(q.channelQueue.workers, 0) - // Just run the level queue - we shut it down later - go q.internal.Run(func(_ context.Context, _ func()) {}, func(_ context.Context, _ func()) {}) - - go func() { - _ = q.ChannelUniqueQueue.AddWorkers(q.workers, 0) - }() + if luq, ok := q.internal.(*LevelUniqueQueue); ok && luq.ByteFIFOUniqueQueue.byteFIFO.Len(luq.shutdownCtx) != 0 { + // Just run the level queue - we shut it down once it's flushed + go q.internal.Run(func(_ func()) {}, func(_ func()) {}) + go func() { + _ = q.internal.Flush(0) + log.Debug("LevelUniqueQueue: %s flushed so shutting down", q.internal.(*LevelUniqueQueue).Name()) + q.internal.(*LevelUniqueQueue).Shutdown() + GetManager().Remove(q.internal.(*LevelUniqueQueue).qid) + }() + } else { + log.Debug("PersistableChannelUniqueQueue: %s Skipping running the empty level queue", q.delayedStarter.name) + q.internal.(*LevelUniqueQueue).Shutdown() + GetManager().Remove(q.internal.(*LevelUniqueQueue).qid) + } - log.Trace("PersistableChannelUniqueQueue: %s Waiting til closed", q.delayedStarter.name) - <-q.closed - log.Trace("PersistableChannelUniqueQueue: %s Cancelling pools", q.delayedStarter.name) - q.internal.(*LevelUniqueQueue).cancel() - q.ChannelUniqueQueue.cancel() - log.Trace("PersistableChannelUniqueQueue: %s Waiting til done", q.delayedStarter.name) - q.ChannelUniqueQueue.Wait() - q.internal.(*LevelUniqueQueue).Wait() - // Redirect all remaining data in the chan to the internal channel - go func() { - log.Trace("PersistableChannelUniqueQueue: %s Redirecting remaining data", q.delayedStarter.name) - for data := range q.ChannelUniqueQueue.dataChan { - _ = q.internal.Push(data) - } - log.Trace("PersistableChannelUniqueQueue: %s Done Redirecting remaining data", q.delayedStarter.name) - }() - log.Trace("PersistableChannelUniqueQueue: %s Done main loop", q.delayedStarter.name) } // Flush flushes the queue func (q *PersistableChannelUniqueQueue) Flush(timeout time.Duration) error { - return q.ChannelUniqueQueue.Flush(timeout) + return q.channelQueue.Flush(timeout) +} + +// FlushWithContext flushes the queue +func (q *PersistableChannelUniqueQueue) FlushWithContext(ctx context.Context) error { + return q.channelQueue.FlushWithContext(ctx) +} + +// IsEmpty checks if a queue is empty +func (q *PersistableChannelUniqueQueue) IsEmpty() bool { + return q.channelQueue.IsEmpty() } // Shutdown processing this queue func 
(q *PersistableChannelUniqueQueue) Shutdown() { log.Trace("PersistableChannelUniqueQueue: %s Shutting down", q.delayedStarter.name) q.lock.Lock() - defer q.lock.Unlock() select { case <-q.closed: + q.lock.Unlock() + return default: if q.internal != nil { q.internal.(*LevelUniqueQueue).Shutdown() } close(q.closed) + q.lock.Unlock() } + + log.Trace("PersistableChannelUniqueQueue: %s Cancelling pools", q.delayedStarter.name) + q.internal.(*LevelUniqueQueue).baseCtxCancel() + q.channelQueue.baseCtxCancel() + log.Trace("PersistableChannelUniqueQueue: %s Waiting til done", q.delayedStarter.name) + q.channelQueue.Wait() + q.internal.(*LevelUniqueQueue).Wait() + // Redirect all remaining data in the chan to the internal channel + go func() { + log.Trace("PersistableChannelUniqueQueue: %s Redirecting remaining data", q.delayedStarter.name) + for data := range q.channelQueue.dataChan { + _ = q.internal.Push(data) + } + log.Trace("PersistableChannelUniqueQueue: %s Done Redirecting remaining data", q.delayedStarter.name) + }() + log.Debug("PersistableChannelUniqueQueue: %s Shutdown", q.delayedStarter.name) } diff --git a/modules/queue/unique_queue_redis.go b/modules/queue/unique_queue_redis.go index 20a50cc1f235..7474c096655d 100644 --- a/modules/queue/unique_queue_redis.go +++ b/modules/queue/unique_queue_redis.go @@ -5,9 +5,8 @@ package queue import ( - "fmt" + "context" - "code.gitea.io/gitea/modules/graceful" "github.com/go-redis/redis/v8" ) @@ -51,8 +50,6 @@ func NewRedisUniqueQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, return nil, err } - byteFIFO.ctx = graceful.NewChannelContext(byteFIFOQueue.IsTerminated(), fmt.Errorf("queue has been terminated")) - queue := &RedisUniqueQueue{ ByteFIFOUniqueQueue: byteFIFOQueue, } @@ -92,8 +89,8 @@ func NewRedisUniqueByteFIFO(config RedisUniqueByteFIFOConfiguration) (*RedisUniq } // PushFunc pushes data to the end of the fifo and calls the callback if it is added -func (fifo *RedisUniqueByteFIFO) PushFunc(data []byte, fn func() error) error { - added, err := fifo.client.SAdd(fifo.ctx, fifo.setName, data).Result() +func (fifo *RedisUniqueByteFIFO) PushFunc(ctx context.Context, data []byte, fn func() error) error { + added, err := fifo.client.SAdd(ctx, fifo.setName, data).Result() if err != nil { return err } @@ -105,12 +102,12 @@ func (fifo *RedisUniqueByteFIFO) PushFunc(data []byte, fn func() error) error { return err } } - return fifo.client.RPush(fifo.ctx, fifo.queueName, data).Err() + return fifo.client.RPush(ctx, fifo.queueName, data).Err() } // Pop pops data from the start of the fifo -func (fifo *RedisUniqueByteFIFO) Pop() ([]byte, error) { - data, err := fifo.client.LPop(fifo.ctx, fifo.queueName).Bytes() +func (fifo *RedisUniqueByteFIFO) Pop(ctx context.Context) ([]byte, error) { + data, err := fifo.client.LPop(ctx, fifo.queueName).Bytes() if err != nil && err != redis.Nil { return data, err } @@ -119,13 +116,13 @@ func (fifo *RedisUniqueByteFIFO) Pop() ([]byte, error) { return data, nil } - err = fifo.client.SRem(fifo.ctx, fifo.setName, data).Err() + err = fifo.client.SRem(ctx, fifo.setName, data).Err() return data, err } // Has returns whether the fifo contains this data -func (fifo *RedisUniqueByteFIFO) Has(data []byte) (bool, error) { - return fifo.client.SIsMember(fifo.ctx, fifo.setName, data).Result() +func (fifo *RedisUniqueByteFIFO) Has(ctx context.Context, data []byte) (bool, error) { + return fifo.client.SIsMember(ctx, fifo.setName, data).Result() } func init() { diff --git a/modules/queue/workerpool.go 
b/modules/queue/workerpool.go index 0f15ccac9efd..0176e2e0b2d2 100644 --- a/modules/queue/workerpool.go +++ b/modules/queue/workerpool.go @@ -21,7 +21,7 @@ import ( type WorkerPool struct { lock sync.Mutex baseCtx context.Context - cancel context.CancelFunc + baseCtxCancel context.CancelFunc cond *sync.Cond qid int64 maxNumberOfWorkers int @@ -52,7 +52,7 @@ func NewWorkerPool(handle HandlerFunc, config WorkerPoolConfiguration) *WorkerPo dataChan := make(chan Data, config.QueueLength) pool := &WorkerPool{ baseCtx: ctx, - cancel: cancel, + baseCtxCancel: cancel, batchLength: config.BatchLength, dataChan: dataChan, handle: handle, @@ -83,7 +83,7 @@ func (p *WorkerPool) Push(data Data) { } func (p *WorkerPool) zeroBoost() { - ctx, cancel := context.WithCancel(p.baseCtx) + ctx, cancel := context.WithTimeout(p.baseCtx, p.boostTimeout) mq := GetManager().GetManagedQueue(p.qid) boost := p.boostWorkers if (boost+p.numberOfWorkers) > p.maxNumberOfWorkers && p.maxNumberOfWorkers >= 0 { @@ -94,26 +94,14 @@ func (p *WorkerPool) zeroBoost() { start := time.Now() pid := mq.RegisterWorkers(boost, start, true, start.Add(p.boostTimeout), cancel, false) - go func() { - select { - case <-ctx.Done(): - case <-time.After(p.boostTimeout): - } + cancel = func() { mq.RemoveWorkers(pid) - cancel() - }() + } } else { log.Warn("WorkerPool: %d has zero workers - adding %d temporary workers for %s", p.qid, p.boostWorkers, p.boostTimeout) - go func() { - select { - case <-ctx.Done(): - case <-time.After(p.boostTimeout): - } - cancel() - }() } p.lock.Unlock() - p.addWorkers(ctx, boost) + p.addWorkers(ctx, cancel, boost) } func (p *WorkerPool) pushBoost(data Data) { @@ -140,7 +128,7 @@ func (p *WorkerPool) pushBoost(data Data) { return } p.blockTimeout *= 2 - ctx, cancel := context.WithCancel(p.baseCtx) + boostCtx, boostCtxCancel := context.WithCancel(p.baseCtx) mq := GetManager().GetManagedQueue(p.qid) boost := p.boostWorkers if (boost+p.numberOfWorkers) > p.maxNumberOfWorkers && p.maxNumberOfWorkers >= 0 { @@ -150,24 +138,24 @@ func (p *WorkerPool) pushBoost(data Data) { log.Warn("WorkerPool: %d (for %s) Channel blocked for %v - adding %d temporary workers for %s, block timeout now %v", p.qid, mq.Name, ourTimeout, boost, p.boostTimeout, p.blockTimeout) start := time.Now() - pid := mq.RegisterWorkers(boost, start, true, start.Add(p.boostTimeout), cancel, false) + pid := mq.RegisterWorkers(boost, start, true, start.Add(p.boostTimeout), boostCtxCancel, false) go func() { - <-ctx.Done() + <-boostCtx.Done() mq.RemoveWorkers(pid) - cancel() + boostCtxCancel() }() } else { log.Warn("WorkerPool: %d Channel blocked for %v - adding %d temporary workers for %s, block timeout now %v", p.qid, ourTimeout, p.boostWorkers, p.boostTimeout, p.blockTimeout) } go func() { <-time.After(p.boostTimeout) - cancel() + boostCtxCancel() p.lock.Lock() p.blockTimeout /= 2 p.lock.Unlock() }() p.lock.Unlock() - p.addWorkers(ctx, boost) + p.addWorkers(boostCtx, boostCtxCancel, boost) p.dataChan <- data } } @@ -243,28 +231,25 @@ func (p *WorkerPool) commonRegisterWorkers(number int, timeout time.Duration, is mq := GetManager().GetManagedQueue(p.qid) if mq != nil { pid := mq.RegisterWorkers(number, start, hasTimeout, end, cancel, isFlusher) - go func() { - <-ctx.Done() - mq.RemoveWorkers(pid) - cancel() - }() log.Trace("WorkerPool: %d (for %s) adding %d workers with group id: %d", p.qid, mq.Name, number, pid) - } else { - log.Trace("WorkerPool: %d adding %d workers (no group id)", p.qid, number) - + return ctx, func() { + mq.RemoveWorkers(pid) + } } + 
log.Trace("WorkerPool: %d adding %d workers (no group id)", p.qid, number) + return ctx, cancel } // AddWorkers adds workers to the pool - this allows the number of workers to go above the limit func (p *WorkerPool) AddWorkers(number int, timeout time.Duration) context.CancelFunc { ctx, cancel := p.commonRegisterWorkers(number, timeout, false) - p.addWorkers(ctx, number) + p.addWorkers(ctx, cancel, number) return cancel } // addWorkers adds workers to the pool -func (p *WorkerPool) addWorkers(ctx context.Context, number int) { +func (p *WorkerPool) addWorkers(ctx context.Context, cancel context.CancelFunc, number int) { for i := 0; i < number; i++ { p.lock.Lock() if p.cond == nil { @@ -279,11 +264,13 @@ func (p *WorkerPool) addWorkers(ctx context.Context, number int) { p.numberOfWorkers-- if p.numberOfWorkers == 0 { p.cond.Broadcast() + cancel() } else if p.numberOfWorkers < 0 { // numberOfWorkers can't go negative but... log.Warn("Number of Workers < 0 for QID %d - this shouldn't happen", p.qid) p.numberOfWorkers = 0 p.cond.Broadcast() + cancel() } p.lock.Unlock() }() diff --git a/modules/references/references.go b/modules/references/references.go index 6c0db0cf4761..ef859abcc79b 100644 --- a/modules/references/references.go +++ b/modules/references/references.go @@ -5,6 +5,7 @@ package references import ( + "bytes" "net/url" "regexp" "strconv" @@ -14,6 +15,8 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup/mdstripper" "code.gitea.io/gitea/modules/setting" + + "github.com/yuin/goldmark/util" ) var ( @@ -29,7 +32,7 @@ var ( // mentionPattern matches all mentions in the form of "@user" or "@org/team" mentionPattern = regexp.MustCompile(`(?:\s|^|\(|\[)(@[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_]+\/?[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_][0-9a-zA-Z-_.]+\/?[0-9a-zA-Z-_.]+[0-9a-zA-Z-_])(?:\s|[:,;.?!]\s|[:,;.?!]?$|\)|\])`) // issueNumericPattern matches string that references to a numeric issue, e.g. #1287 - issueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([#!][0-9]+)(?:\s|$|\)|\]|[:;,.?!]\s|[:;,.?!]$)`) + issueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[|\')([#!][0-9]+)(?:\s|$|\)|\]|[:;,.?!]\s|[:;,.?!]$)`) // issueAlphanumericPattern matches string that references to an alphanumeric issue, e.g. ABC-1234 issueAlphanumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([A-Z]{1,10}-[1-9][0-9]*)(?:\s|$|\)|\]|:|\.(\s|$))`) // crossReferenceIssueNumericPattern matches string that references a numeric issue in a different repository @@ -321,7 +324,7 @@ func FindRenderizableReferenceNumeric(content string, prOnly bool) (bool, *Rende return false, nil } } - r := getCrossReference([]byte(content), match[2], match[3], false, prOnly) + r := getCrossReference(util.StringToReadOnlyBytes(content), match[2], match[3], false, prOnly) if r == nil { return false, nil } @@ -465,18 +468,17 @@ func findAllIssueReferencesBytes(content []byte, links []string) []*rawReference } func getCrossReference(content []byte, start, end int, fromLink bool, prOnly bool) *rawReference { - refid := string(content[start:end]) - sep := strings.IndexAny(refid, "#!") + sep := bytes.IndexAny(content[start:end], "#!") if sep < 0 { return nil } - isPull := refid[sep] == '!' + isPull := content[start+sep] == '!' 
if prOnly && !isPull { return nil } - repo := refid[:sep] - issue := refid[sep+1:] - index, err := strconv.ParseInt(issue, 10, 64) + repo := string(content[start : start+sep]) + issue := string(content[start+sep+1 : end]) + index, err := strconv.ParseInt(string(issue), 10, 64) if err != nil { return nil } diff --git a/modules/references/references_test.go b/modules/references/references_test.go index d4f080490d1f..11a31b41b683 100644 --- a/modules/references/references_test.go +++ b/modules/references/references_test.go @@ -197,6 +197,13 @@ func TestFindAllIssueReferences(t *testing.T) { {200, "user3", "repo4", "200", false, XRefActionNone, &RefSpan{Start: 5, End: 20}, nil, ""}, }, }, + { + "Merge pull request '#12345 My fix for a bug' (!1337) from feature-branch into main", + []testResult{ + {12345, "", "", "12345", false, XRefActionNone, &RefSpan{Start: 20, End: 26}, nil, ""}, + {1337, "", "", "1337", true, XRefActionNone, &RefSpan{Start: 46, End: 51}, nil, ""}, + }, + }, { "Which abc. #9434 same as above", []testResult{ @@ -474,7 +481,7 @@ func TestParseCloseKeywords(t *testing.T) { {",$!", "", ""}, {"1234", "", ""}, } { - // The patern only needs to match the part that precedes the reference. + // The pattern only needs to match the part that precedes the reference. // getCrossReference() takes care of finding the reference itself. pat := makeKeywordsPat([]string{test.pattern}) if test.expected == "" { diff --git a/modules/repofiles/action_test.go b/modules/repofiles/action_test.go index 290844de0267..97632df68fb8 100644 --- a/modules/repofiles/action_test.go +++ b/modules/repofiles/action_test.go @@ -250,7 +250,7 @@ func TestUpdateIssuesCommit_AnotherRepoNoPermission(t *testing.T) { user := models.AssertExistsAndLoadBean(t, &models.User{ID: 10}).(*models.User) // Test that a push with close reference *can not* close issue - // If the commiter doesn't have push rights in that repo + // If the committer doesn't have push rights in that repo pushCommits := []*repository.PushCommit{ { Sha1: "abcdef3", diff --git a/modules/repofiles/content_test.go b/modules/repofiles/content_test.go index 278216112296..253d43445929 100644 --- a/modules/repofiles/content_test.go +++ b/modules/repofiles/content_test.go @@ -69,7 +69,7 @@ func TestGetContents(t *testing.T) { assert.NoError(t, err) }) - t.Run("Get REAMDE.md contents with ref as empty string (should then use the repo's default branch) with GetContents()", func(t *testing.T) { + t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents()", func(t *testing.T) { fileContentResponse, err := GetContents(ctx.Repo.Repository, treePath, "", false) assert.EqualValues(t, expectedContentsResponse, fileContentResponse) assert.NoError(t, err) @@ -132,7 +132,7 @@ func TestGetContentsOrListForFile(t *testing.T) { assert.NoError(t, err) }) - t.Run("Get REAMDE.md contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList()", func(t *testing.T) { + t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList()", func(t *testing.T) { fileContentResponse, err := GetContentsOrList(ctx.Repo.Repository, treePath, "") assert.EqualValues(t, expectedContentsResponse, fileContentResponse) assert.NoError(t, err) diff --git a/modules/repository/adopt.go b/modules/repository/adopt.go index 030211963056..321e6ab7672c 100644 --- a/modules/repository/adopt.go +++ b/modules/repository/adopt.go @@ -129,12 +129,12 @@ func 
ListUnadoptedRepositories(query string, opts *models.ListOptions) ([]string var err error globUser, err = glob.Compile(qsplit[0]) if err != nil { - log.Info("Invalid glob expresion '%s' (skipped): %v", qsplit[0], err) + log.Info("Invalid glob expression '%s' (skipped): %v", qsplit[0], err) } if len(qsplit) > 1 { globRepo, err = glob.Compile(qsplit[1]) if err != nil { - log.Info("Invalid glob expresion '%s' (skipped): %v", qsplit[1], err) + log.Info("Invalid glob expression '%s' (skipped): %v", qsplit[1], err) } } } @@ -228,7 +228,7 @@ func ListUnadoptedRepositories(query string, opts *models.ListOptions) ([]string found := false repoLoop: for i, repo := range repos { - if repo.Name == name { + if repo.LowerName == name { found = true repos = append(repos[:i], repos[i+1:]...) break repoLoop diff --git a/modules/repository/archive.go b/modules/repository/archive.go new file mode 100644 index 000000000000..bea636c57905 --- /dev/null +++ b/modules/repository/archive.go @@ -0,0 +1,20 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package repository + +import ( + "context" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/storage" +) + +// DeleteRepositoryArchives deletes all repositories' archives. +func DeleteRepositoryArchives(ctx context.Context) error { + if err := models.DeleteAllRepoArchives(); err != nil { + return err + } + return storage.Clean(storage.RepoArchives) +} diff --git a/modules/repository/cache.go b/modules/repository/cache.go index 5d6dea8fcb60..e574f1adb7f7 100644 --- a/modules/repository/cache.go +++ b/modules/repository/cache.go @@ -5,6 +5,7 @@ package repository import ( + "context" "strings" "code.gitea.io/gitea/models" @@ -23,7 +24,7 @@ func getRefName(fullRefName string) string { } // CacheRef cachhe last commit information of the branch or the tag -func CacheRef(repo *models.Repository, gitRepo *git.Repository, fullRefName string) error { +func CacheRef(ctx context.Context, repo *models.Repository, gitRepo *git.Repository, fullRefName string) error { if !setting.CacheService.LastCommit.Enabled { return nil } @@ -43,5 +44,5 @@ func CacheRef(repo *models.Repository, gitRepo *git.Repository, fullRefName stri commitCache := git.NewLastCommitCache(repo.FullName(), gitRepo, setting.LastCommitCacheTTLSeconds, cache.GetCache()) - return commitCache.CacheCommit(commit) + return commitCache.CacheCommit(ctx, commit) } diff --git a/modules/repository/commits.go b/modules/repository/commits.go index 6b67c2c26242..eaaf3b8b198b 100644 --- a/modules/repository/commits.go +++ b/modules/repository/commits.go @@ -28,8 +28,8 @@ type PushCommit struct { // PushCommits represents list of commits in a push operation. type PushCommits struct { - Len int Commits []*PushCommit + HeadCommit *PushCommit CompareURL string avatars map[string]string @@ -44,67 +44,88 @@ func NewPushCommits() *PushCommits { } } -// ToAPIPayloadCommits converts a PushCommits object to -// api.PayloadCommit format. -func (pc *PushCommits) ToAPIPayloadCommits(repoPath, repoLink string) ([]*api.PayloadCommit, error) { - commits := make([]*api.PayloadCommit, len(pc.Commits)) - - if pc.emailUsers == nil { - pc.emailUsers = make(map[string]*models.User) - } +// toAPIPayloadCommit converts a single PushCommit to an api.PayloadCommit object. 
+func (pc *PushCommits) toAPIPayloadCommit(repoPath, repoLink string, commit *PushCommit) (*api.PayloadCommit, error) { var err error - for i, commit := range pc.Commits { - authorUsername := "" - author, ok := pc.emailUsers[commit.AuthorEmail] - if !ok { - author, err = models.GetUserByEmail(commit.AuthorEmail) - if err == nil { - authorUsername = author.Name - pc.emailUsers[commit.AuthorEmail] = author - } - } else { + authorUsername := "" + author, ok := pc.emailUsers[commit.AuthorEmail] + if !ok { + author, err = models.GetUserByEmail(commit.AuthorEmail) + if err == nil { authorUsername = author.Name + pc.emailUsers[commit.AuthorEmail] = author } + } else { + authorUsername = author.Name + } - committerUsername := "" - committer, ok := pc.emailUsers[commit.CommitterEmail] - if !ok { - committer, err = models.GetUserByEmail(commit.CommitterEmail) - if err == nil { - // TODO: check errors other than email not found. - committerUsername = committer.Name - pc.emailUsers[commit.CommitterEmail] = committer - } - } else { + committerUsername := "" + committer, ok := pc.emailUsers[commit.CommitterEmail] + if !ok { + committer, err = models.GetUserByEmail(commit.CommitterEmail) + if err == nil { + // TODO: check errors other than email not found. committerUsername = committer.Name + pc.emailUsers[commit.CommitterEmail] = committer } + } else { + committerUsername = committer.Name + } - fileStatus, err := git.GetCommitFileStatus(repoPath, commit.Sha1) + fileStatus, err := git.GetCommitFileStatus(repoPath, commit.Sha1) + if err != nil { + return nil, fmt.Errorf("FileStatus [commit_sha1: %s]: %v", commit.Sha1, err) + } + + return &api.PayloadCommit{ + ID: commit.Sha1, + Message: commit.Message, + URL: fmt.Sprintf("%s/commit/%s", repoLink, commit.Sha1), + Author: &api.PayloadUser{ + Name: commit.AuthorName, + Email: commit.AuthorEmail, + UserName: authorUsername, + }, + Committer: &api.PayloadUser{ + Name: commit.CommitterName, + Email: commit.CommitterEmail, + UserName: committerUsername, + }, + Added: fileStatus.Added, + Removed: fileStatus.Removed, + Modified: fileStatus.Modified, + Timestamp: commit.Timestamp, + }, nil +} + +// ToAPIPayloadCommits converts a PushCommits object to api.PayloadCommit format. +// It returns all converted commits and, if provided, the head commit or an error otherwise. 
+func (pc *PushCommits) ToAPIPayloadCommits(repoPath, repoLink string) ([]*api.PayloadCommit, *api.PayloadCommit, error) { + commits := make([]*api.PayloadCommit, len(pc.Commits)) + var headCommit *api.PayloadCommit + + if pc.emailUsers == nil { + pc.emailUsers = make(map[string]*models.User) + } + for i, commit := range pc.Commits { + apiCommit, err := pc.toAPIPayloadCommit(repoPath, repoLink, commit) if err != nil { - return nil, fmt.Errorf("FileStatus [commit_sha1: %s]: %v", commit.Sha1, err) + return nil, nil, err } - commits[i] = &api.PayloadCommit{ - ID: commit.Sha1, - Message: commit.Message, - URL: fmt.Sprintf("%s/commit/%s", repoLink, commit.Sha1), - Author: &api.PayloadUser{ - Name: commit.AuthorName, - Email: commit.AuthorEmail, - UserName: authorUsername, - }, - Committer: &api.PayloadUser{ - Name: commit.CommitterName, - Email: commit.CommitterEmail, - UserName: committerUsername, - }, - Added: fileStatus.Added, - Removed: fileStatus.Removed, - Modified: fileStatus.Modified, - Timestamp: commit.Timestamp, + commits[i] = apiCommit + if pc.HeadCommit != nil && pc.HeadCommit.Sha1 == commits[i].ID { + headCommit = apiCommit } } - return commits, nil + if pc.HeadCommit != nil && headCommit == nil { + var err error + headCommit, err = pc.toAPIPayloadCommit(repoPath, repoLink, pc.HeadCommit) + if err != nil { + return nil, nil, err + } + } + return commits, headCommit, nil } // AvatarLink tries to match user in database with e-mail @@ -157,13 +178,9 @@ func CommitToPushCommit(commit *git.Commit) *PushCommit { // ListToPushCommits transforms a list.List to PushCommits type. func ListToPushCommits(l *list.List) *PushCommits { var commits []*PushCommit - var actEmail string for e := l.Front(); e != nil; e = e.Next() { - commit := e.Value.(*git.Commit) - if actEmail == "" { - actEmail = commit.Committer.Email - } - commits = append(commits, CommitToPushCommit(commit)) + commit := CommitToPushCommit(e.Value.(*git.Commit)) + commits = append(commits, commit) } - return &PushCommits{l.Len(), commits, "", make(map[string]string), make(map[string]*models.User)} + return &PushCommits{commits, nil, "", make(map[string]string), make(map[string]*models.User)} } diff --git a/modules/repository/commits_test.go b/modules/repository/commits_test.go index 16677fe8a684..8e0d8bf90fa0 100644 --- a/modules/repository/commits_test.go +++ b/modules/repository/commits_test.go @@ -46,12 +46,13 @@ func TestPushCommits_ToAPIPayloadCommits(t *testing.T) { Message: "good signed commit", }, } - pushCommits.Len = len(pushCommits.Commits) + pushCommits.HeadCommit = &PushCommit{Sha1: "69554a6"} repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 16}).(*models.Repository) - payloadCommits, err := pushCommits.ToAPIPayloadCommits(repo.RepoPath(), "/user2/repo16") + payloadCommits, headCommit, err := pushCommits.ToAPIPayloadCommits(repo.RepoPath(), "/user2/repo16") assert.NoError(t, err) - assert.EqualValues(t, 3, len(payloadCommits)) + assert.Len(t, payloadCommits, 3) + assert.NotNil(t, headCommit) assert.Equal(t, "69554a6", payloadCommits[0].ID) assert.Equal(t, "not signed commit", payloadCommits[0].Message) @@ -85,6 +86,17 @@ func TestPushCommits_ToAPIPayloadCommits(t *testing.T) { assert.EqualValues(t, []string{"readme.md"}, payloadCommits[2].Added) assert.EqualValues(t, []string{}, payloadCommits[2].Removed) assert.EqualValues(t, []string{}, payloadCommits[2].Modified) + + assert.Equal(t, "69554a6", headCommit.ID) + assert.Equal(t, "not signed commit", headCommit.Message) + assert.Equal(t, 
"/user2/repo16/commit/69554a6", headCommit.URL) + assert.Equal(t, "User2", headCommit.Committer.Name) + assert.Equal(t, "user2", headCommit.Committer.UserName) + assert.Equal(t, "User2", headCommit.Author.Name) + assert.Equal(t, "user2", headCommit.Author.UserName) + assert.EqualValues(t, []string{}, headCommit.Added) + assert.EqualValues(t, []string{}, headCommit.Removed) + assert.EqualValues(t, []string{"readme.md"}, headCommit.Modified) } func TestPushCommits_AvatarLink(t *testing.T) { @@ -109,16 +121,15 @@ func TestPushCommits_AvatarLink(t *testing.T) { Message: "message2", }, } - pushCommits.Len = len(pushCommits.Commits) assert.Equal(t, - "https://secure.gravatar.com/avatar/ab53a2911ddf9b4817ac01ddcd3d975f?d=identicon&s=56", + "https://secure.gravatar.com/avatar/ab53a2911ddf9b4817ac01ddcd3d975f?d=identicon&s=112", pushCommits.AvatarLink("user2@example.com")) assert.Equal(t, "https://secure.gravatar.com/avatar/"+ fmt.Sprintf("%x", md5.Sum([]byte("nonexistent@example.com")))+ - "?d=identicon&s=56", + "?d=identicon&s=112", pushCommits.AvatarLink("nonexistent@example.com")) } @@ -177,7 +188,6 @@ func TestListToPushCommits(t *testing.T) { }) pushCommits := ListToPushCommits(l) - assert.Equal(t, 2, pushCommits.Len) if assert.Len(t, pushCommits.Commits, 2) { assert.Equal(t, "Message1", pushCommits.Commits[0].Message) assert.Equal(t, hexString1, pushCommits.Commits[0].Sha1) diff --git a/modules/repository/create_test.go b/modules/repository/create_test.go index d3e8bf5af1eb..65ed7806a9b2 100644 --- a/modules/repository/create_test.go +++ b/modules/repository/create_test.go @@ -21,7 +21,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { team := models.AssertExistsAndLoadBean(t, &models.Team{ID: teamID}).(*models.Team) assert.NoError(t, team.GetRepositories(&models.SearchTeamOptions{}), "%s: GetRepositories", team.Name) assert.Len(t, team.Repos, team.NumRepos, "%s: len repo", team.Name) - assert.Equal(t, len(repoIds), len(team.Repos), "%s: repo count", team.Name) + assert.Len(t, team.Repos, len(repoIds), "%s: repo count", team.Name) for i, rid := range repoIds { if rid > 0 { assert.True(t, team.HasRepository(rid), "%s: HasRepository(%d) %d", rid, i) diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index aba5db6719b2..ed6036851e6b 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -22,9 +22,53 @@ import ( func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { hookNames = []string{"pre-receive", "update", "post-receive"} hookTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), - fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), - fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" 
| \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), + fmt.Sprintf(`#!/usr/bin/env %s +data=$(cat) +exitcodes="" +hookname=$(basename $0) +GIT_DIR=${GIT_DIR:-$(dirname $0)/..} + +for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do +test -x "${hook}" && test -f "${hook}" || continue +echo "${data}" | "${hook}" +exitcodes="${exitcodes} $?" +done + +for i in ${exitcodes}; do +[ ${i} -eq 0 ] || exit ${i} +done +`, setting.ScriptType), + fmt.Sprintf(`#!/usr/bin/env %s +exitcodes="" +hookname=$(basename $0) +GIT_DIR=${GIT_DIR:-$(dirname $0/..)} + +for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do +test -x "${hook}" && test -f "${hook}" || continue +"${hook}" $1 $2 $3 +exitcodes="${exitcodes} $?" +done + +for i in ${exitcodes}; do +[ ${i} -eq 0 ] || exit ${i} +done +`, setting.ScriptType), + fmt.Sprintf(`#!/usr/bin/env %s +data=$(cat) +exitcodes="" +hookname=$(basename $0) +GIT_DIR=${GIT_DIR:-$(dirname $0)/..} + +for hook in ${GIT_DIR}/hooks/${hookname}.d/*; do +test -x "${hook}" && test -f "${hook}" || continue +echo "${data}" | "${hook}" +exitcodes="${exitcodes} $?" +done + +for i in ${exitcodes}; do +[ ${i} -eq 0 ] || exit ${i} +done +`, setting.ScriptType), } giteaHookTpls = []string{ fmt.Sprintf("#!/usr/bin/env %s\n%s hook --config=%s pre-receive\n", setting.ScriptType, util.ShellEscape(setting.AppPath), util.ShellEscape(setting.CustomConf)), diff --git a/modules/repository/repo.go b/modules/repository/repo.go index 50eb185daa9e..08531c04ed3e 100644 --- a/modules/repository/repo.go +++ b/modules/repository/repo.go @@ -7,6 +7,7 @@ package repository import ( "context" "fmt" + "io" "net/url" "path" "strings" @@ -323,64 +324,90 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *models.Reposi errChan := make(chan error, 1) go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan) - err := func() error { - for pointerBlob := range pointerChan { - meta, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: pointerBlob.Pointer, RepositoryID: repo.ID}) - if err != nil { - return fmt.Errorf("StoreMissingLfsObjectsInRepository models.NewLFSMetaObject: %w", err) - } - if meta.Existing { - continue + downloadObjects := func(pointers []lfs.Pointer) error { + err := client.Download(ctx, pointers, func(p lfs.Pointer, content io.ReadCloser, objectError error) error { + if objectError != nil { + return objectError } - log.Trace("StoreMissingLfsObjectsInRepository: LFS OID[%s] not present in repository %s", pointerBlob.Oid, repo.FullName()) + defer content.Close() - err = func() error { - exist, err := contentStore.Exists(pointerBlob.Pointer) - if err != nil { - return fmt.Errorf("StoreMissingLfsObjectsInRepository contentStore.Exists: %w", err) - } - if !exist { - if setting.LFS.MaxFileSize > 0 && pointerBlob.Size > setting.LFS.MaxFileSize { - log.Info("LFS OID[%s] download denied because of LFS_MAX_FILE_SIZE=%d < size %d", pointerBlob.Oid, setting.LFS.MaxFileSize, pointerBlob.Size) - return nil - } - - stream, err := client.Download(ctx, pointerBlob.Oid, pointerBlob.Size) - if err != nil { - return fmt.Errorf("StoreMissingLfsObjectsInRepository: LFS OID[%s] failed to download: %w", pointerBlob.Oid, err) - } - defer stream.Close() - - if err := contentStore.Put(pointerBlob.Pointer, stream); err != nil { - return fmt.Errorf("StoreMissingLfsObjectsInRepository LFS OID[%s] contentStore.Put: %w", pointerBlob.Oid, err) - } - } else { - log.Trace("StoreMissingLfsObjectsInRepository: LFS OID[%s] already 
present in content store", pointerBlob.Oid) - } - return nil - }() + _, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repo.ID}) if err != nil { - if _, err2 := repo.RemoveLFSMetaObjectByOid(meta.Oid); err2 != nil { - log.Error("StoreMissingLfsObjectsInRepository RemoveLFSMetaObjectByOid[Oid: %s]: %w", meta.Oid, err2) - } + log.Error("Error creating LFS meta object %v: %v", p, err) + return err + } - select { - case <-ctx.Done(): - return nil - default: + if err := contentStore.Put(p, content); err != nil { + log.Error("Error storing content for LFS meta object %v: %v", p, err) + if _, err2 := repo.RemoveLFSMetaObjectByOid(p.Oid); err2 != nil { + log.Error("Error removing LFS meta object %v: %v", p, err2) } return err } + return nil + }) + if err != nil { + select { + case <-ctx.Done(): + return nil + default: + } } - return nil - }() - if err != nil { return err } + var batch []lfs.Pointer + for pointerBlob := range pointerChan { + meta, err := repo.GetLFSMetaObjectByOid(pointerBlob.Oid) + if err != nil && err != models.ErrLFSObjectNotExist { + log.Error("Error querying LFS meta object %v: %v", pointerBlob.Pointer, err) + return err + } + if meta != nil { + log.Trace("Skipping unknown LFS meta object %v", pointerBlob.Pointer) + continue + } + + log.Trace("LFS object %v not present in repository %s", pointerBlob.Pointer, repo.FullName()) + + exist, err := contentStore.Exists(pointerBlob.Pointer) + if err != nil { + log.Error("Error checking if LFS object %v exists: %v", pointerBlob.Pointer, err) + return err + } + + if exist { + log.Trace("LFS object %v already present; creating meta object", pointerBlob.Pointer) + _, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: pointerBlob.Pointer, RepositoryID: repo.ID}) + if err != nil { + log.Error("Error creating LFS meta object %v: %v", pointerBlob.Pointer, err) + return err + } + } else { + if setting.LFS.MaxFileSize > 0 && pointerBlob.Size > setting.LFS.MaxFileSize { + log.Info("LFS object %v download denied because of LFS_MAX_FILE_SIZE=%d < size %d", pointerBlob.Pointer, setting.LFS.MaxFileSize, pointerBlob.Size) + continue + } + + batch = append(batch, pointerBlob.Pointer) + if len(batch) >= client.BatchSize() { + if err := downloadObjects(batch); err != nil { + return err + } + batch = nil + } + } + } + if len(batch) > 0 { + if err := downloadObjects(batch); err != nil { + return err + } + } + err, has := <-errChan if has { + log.Error("Error enumerating LFS objects for repository: %v", err) return err } diff --git a/modules/secret/secret.go b/modules/secret/secret.go index 2b6e22cc6c7f..976924ac606d 100644 --- a/modules/secret/secret.go +++ b/modules/secret/secret.go @@ -13,29 +13,18 @@ import ( "encoding/hex" "errors" "io" + + "code.gitea.io/gitea/modules/util" ) -// New creats a new secret +// New creates a new secret func New() (string, error) { - return NewWithLength(32) + return NewWithLength(44) } // NewWithLength creates a new secret for a given length func NewWithLength(length int64) (string, error) { - return randomString(length) -} - -func randomBytes(len int64) ([]byte, error) { - b := make([]byte, len) - if _, err := rand.Read(b); err != nil { - return nil, err - } - return b, nil -} - -func randomString(len int64) (string, error) { - b, err := randomBytes(len) - return base64.URLEncoding.EncodeToString(b), err + return util.RandomString(length) } // AesEncrypt encrypts text and given key with AES. 
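
For context, the reworked StoreMissingLfsObjectsInRepository above accumulates pointers into a batch, flushes it once it reaches the client's batch size, and flushes the remainder after the loop. A minimal, self-contained sketch of that batch-and-flush pattern is below; the `pointer` type, `batchSize` constant, and `flush` closure are placeholders standing in for `lfs.Pointer`, `client.BatchSize()`, and `downloadObjects`, not Gitea APIs.

```go
// Standalone sketch of the accumulate/flush loop used above.
package main

import "fmt"

type pointer struct {
	Oid  string
	Size int64
}

func main() {
	const batchSize = 3 // stands in for client.BatchSize()

	// stands in for downloadObjects: download one batch of pointers
	flush := func(batch []pointer) error {
		fmt.Printf("downloading %d objects\n", len(batch))
		return nil
	}

	incoming := []pointer{{Oid: "a"}, {Oid: "b"}, {Oid: "c"}, {Oid: "d"}, {Oid: "e"}}

	var batch []pointer
	for _, p := range incoming {
		batch = append(batch, p)
		if len(batch) >= batchSize {
			if err := flush(batch); err != nil {
				panic(err)
			}
			batch = nil
		}
	}
	// Flush whatever is left over, mirroring the final `if len(batch) > 0` block above.
	if len(batch) > 0 {
		if err := flush(batch); err != nil {
			panic(err)
		}
	}
}
```
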
diff --git a/modules/secret/secret_test.go b/modules/secret/secret_test.go index 6531ffbebc74..f3a88eecc825 100644 --- a/modules/secret/secret_test.go +++ b/modules/secret/secret_test.go @@ -13,7 +13,7 @@ import ( func TestNew(t *testing.T) { result, err := New() assert.NoError(t, err) - assert.True(t, len(result) > 32) + assert.True(t, len(result) == 44) result2, err := New() assert.NoError(t, err) diff --git a/modules/setting/cache.go b/modules/setting/cache.go index 7bfea919618e..2bfe2318f547 100644 --- a/modules/setting/cache.go +++ b/modules/setting/cache.go @@ -58,11 +58,16 @@ func newCacheService() { log.Fatal("Failed to map Cache settings: %v", err) } - CacheService.Adapter = sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache"}) + CacheService.Adapter = sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache", "twoqueue"}) switch CacheService.Adapter { case "memory": case "redis", "memcache": CacheService.Conn = strings.Trim(sec.Key("HOST").String(), "\" ") + case "twoqueue": + CacheService.Conn = strings.TrimSpace(sec.Key("HOST").String()) + if CacheService.Conn == "" { + CacheService.Conn = "50000" + } case "": // disable cache CacheService.Enabled = false default: diff --git a/modules/setting/git.go b/modules/setting/git.go index 308d94894ba7..aa838a8d641a 100644 --- a/modules/setting/git.go +++ b/modules/setting/git.go @@ -7,7 +7,6 @@ package setting import ( "time" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" ) @@ -19,13 +18,14 @@ var ( MaxGitDiffLines int MaxGitDiffLineCharacters int MaxGitDiffFiles int - CommitsRangeSize int - BranchesRangeSize int + CommitsRangeSize int // CommitsRangeSize the default commits range size + BranchesRangeSize int // BranchesRangeSize the default branches range size VerbosePush bool VerbosePushDelay time.Duration GCArgs []string `ini:"GC_ARGS" delim:" "` EnableAutoGitWireProtocol bool PullRequestPushMessage bool + LargeObjectThreshold int64 Timeout struct { Default int Migrate int @@ -46,6 +46,7 @@ var ( GCArgs: []string{}, EnableAutoGitWireProtocol: true, PullRequestPushMessage: true, + LargeObjectThreshold: 1024 * 1024, Timeout: struct { Default int Migrate int @@ -54,7 +55,7 @@ var ( Pull int GC int `ini:"GC"` }{ - Default: int(git.DefaultCommandExecutionTimeout / time.Second), + Default: 360, Migrate: 600, Mirror: 300, Clone: 300, @@ -68,35 +69,4 @@ func newGit() { if err := Cfg.Section("git").MapTo(&Git); err != nil { log.Fatal("Failed to map Git settings: %v", err) } - if err := git.SetExecutablePath(Git.Path); err != nil { - log.Fatal("Failed to initialize Git settings: %v", err) - } - git.DefaultCommandExecutionTimeout = time.Duration(Git.Timeout.Default) * time.Second - - version, err := git.LocalVersion() - if err != nil { - log.Fatal("Error retrieving git version: %v", err) - } - - // force cleanup args - git.GlobalCommandArgs = []string{} - - if git.CheckGitVersionAtLeast("2.9") == nil { - // Explicitly disable credential helper, otherwise Git credentials might leak - git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "credential.helper=") - } - - var format = "Git Version: %s" - var args = []interface{}{version.Original()} - // Since git wire protocol has been released from git v2.18 - if Git.EnableAutoGitWireProtocol && git.CheckGitVersionAtLeast("2.18") == nil { - git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "protocol.version=2") - format += ", Wire Protocol %s Enabled" - args = append(args, "Version 2") // for focus color - } - - 
git.CommitsRangeSize = Git.CommitsRangeSize - git.BranchesRangeSize = Git.BranchesRangeSize - - log.Info(format, args...) } diff --git a/modules/setting/indexer.go b/modules/setting/indexer.go index 842ef8ea4116..2e9d8b288356 100644 --- a/modules/setting/indexer.go +++ b/modules/setting/indexer.go @@ -5,7 +5,6 @@ package setting import ( - "path" "path/filepath" "strings" "time" @@ -29,10 +28,10 @@ var ( IssuePath string IssueConnStr string IssueIndexerName string - IssueQueueType string - IssueQueueDir string - IssueQueueConnStr string - IssueQueueBatchNumber int + IssueQueueType string // DEPRECATED - replaced by queue.issue_indexer + IssueQueueDir string // DEPRECATED - replaced by queue.issue_indexer + IssueQueueConnStr string // DEPRECATED - replaced by queue.issue_indexer + IssueQueueBatchNumber int // DEPRECATED - replaced by queue.issue_indexer StartupTimeout time.Duration RepoIndexerEnabled bool @@ -40,20 +39,17 @@ var ( RepoPath string RepoConnStr string RepoIndexerName string - UpdateQueueLength int + UpdateQueueLength int // DEPRECATED - replaced by queue.issue_indexer MaxIndexerFileSize int64 IncludePatterns []glob.Glob ExcludePatterns []glob.Glob ExcludeVendored bool }{ - IssueType: "bleve", - IssuePath: "indexers/issues.bleve", - IssueConnStr: "", - IssueIndexerName: "gitea_issues", - IssueQueueType: LevelQueueType, - IssueQueueDir: "indexers/issues.queue", - IssueQueueConnStr: "", - IssueQueueBatchNumber: 20, + IssueType: "bleve", + IssuePath: "indexers/issues.bleve", + IssueConnStr: "", + IssueIndexerName: "gitea_issues", + IssueQueueType: LevelQueueType, RepoIndexerEnabled: false, RepoType: "bleve", @@ -68,23 +64,25 @@ var ( func newIndexerService() { sec := Cfg.Section("indexer") Indexer.IssueType = sec.Key("ISSUE_INDEXER_TYPE").MustString("bleve") - Indexer.IssuePath = sec.Key("ISSUE_INDEXER_PATH").MustString(path.Join(AppDataPath, "indexers/issues.bleve")) + Indexer.IssuePath = filepath.ToSlash(sec.Key("ISSUE_INDEXER_PATH").MustString(filepath.ToSlash(filepath.Join(AppDataPath, "indexers/issues.bleve")))) if !filepath.IsAbs(Indexer.IssuePath) { - Indexer.IssuePath = path.Join(AppWorkPath, Indexer.IssuePath) + Indexer.IssuePath = filepath.ToSlash(filepath.Join(AppWorkPath, Indexer.IssuePath)) } Indexer.IssueConnStr = sec.Key("ISSUE_INDEXER_CONN_STR").MustString(Indexer.IssueConnStr) Indexer.IssueIndexerName = sec.Key("ISSUE_INDEXER_NAME").MustString(Indexer.IssueIndexerName) - Indexer.IssueQueueType = sec.Key("ISSUE_INDEXER_QUEUE_TYPE").MustString(LevelQueueType) - Indexer.IssueQueueDir = sec.Key("ISSUE_INDEXER_QUEUE_DIR").MustString(path.Join(AppDataPath, "indexers/issues.queue")) + // The following settings are deprecated and can be overridden by settings in [queue] or [queue.issue_indexer] + Indexer.IssueQueueType = sec.Key("ISSUE_INDEXER_QUEUE_TYPE").MustString("") + Indexer.IssueQueueDir = filepath.ToSlash(sec.Key("ISSUE_INDEXER_QUEUE_DIR").MustString("")) Indexer.IssueQueueConnStr = sec.Key("ISSUE_INDEXER_QUEUE_CONN_STR").MustString("") - Indexer.IssueQueueBatchNumber = sec.Key("ISSUE_INDEXER_QUEUE_BATCH_NUMBER").MustInt(20) + Indexer.IssueQueueBatchNumber = sec.Key("ISSUE_INDEXER_QUEUE_BATCH_NUMBER").MustInt(0) + Indexer.UpdateQueueLength = sec.Key("UPDATE_BUFFER_LEN").MustInt(0) Indexer.RepoIndexerEnabled = sec.Key("REPO_INDEXER_ENABLED").MustBool(false) Indexer.RepoType = sec.Key("REPO_INDEXER_TYPE").MustString("bleve") - Indexer.RepoPath = sec.Key("REPO_INDEXER_PATH").MustString(path.Join(AppDataPath, "indexers/repos.bleve")) + Indexer.RepoPath = 
filepath.ToSlash(sec.Key("REPO_INDEXER_PATH").MustString(filepath.ToSlash(filepath.Join(AppDataPath, "indexers/repos.bleve")))) if !filepath.IsAbs(Indexer.RepoPath) { - Indexer.RepoPath = path.Join(AppWorkPath, Indexer.RepoPath) + Indexer.RepoPath = filepath.ToSlash(filepath.Join(AppWorkPath, Indexer.RepoPath)) } Indexer.RepoConnStr = sec.Key("REPO_INDEXER_CONN_STR").MustString("") Indexer.RepoIndexerName = sec.Key("REPO_INDEXER_NAME").MustString("gitea_codes") @@ -92,7 +90,6 @@ func newIndexerService() { Indexer.IncludePatterns = IndexerGlobFromString(sec.Key("REPO_INDEXER_INCLUDE").MustString("")) Indexer.ExcludePatterns = IndexerGlobFromString(sec.Key("REPO_INDEXER_EXCLUDE").MustString("")) Indexer.ExcludeVendored = sec.Key("REPO_INDEXER_EXCLUDE_VENDORED").MustBool(true) - Indexer.UpdateQueueLength = sec.Key("UPDATE_BUFFER_LEN").MustInt(20) Indexer.MaxIndexerFileSize = sec.Key("MAX_FILE_SIZE").MustInt64(1024 * 1024) Indexer.StartupTimeout = sec.Key("STARTUP_TIMEOUT").MustDuration(30 * time.Second) } @@ -104,7 +101,7 @@ func IndexerGlobFromString(globstr string) []glob.Glob { expr = strings.TrimSpace(expr) if expr != "" { if g, err := glob.Compile(expr, '.', '/'); err != nil { - log.Info("Invalid glob expresion '%s' (skipped): %v", expr, err) + log.Info("Invalid glob expression '%s' (skipped): %v", expr, err) } else { extarr = append(extarr, g) } diff --git a/modules/setting/lfs.go b/modules/setting/lfs.go index ab475bbeb4b3..a4bbd3c3ff37 100644 --- a/modules/setting/lfs.go +++ b/modules/setting/lfs.go @@ -6,14 +6,10 @@ package setting import ( "encoding/base64" - "os" - "path/filepath" "time" "code.gitea.io/gitea/modules/generate" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/util" ini "gopkg.in/ini.v1" ) @@ -57,55 +53,16 @@ func newLFSService() { n, err := base64.RawURLEncoding.Decode(LFS.JWTSecretBytes, []byte(LFS.JWTSecretBase64)) if err != nil || n != 32 { - LFS.JWTSecretBase64, err = generate.NewJwtSecret() + LFS.JWTSecretBase64, err = generate.NewJwtSecretBase64() if err != nil { log.Fatal("Error generating JWT Secret for custom config: %v", err) return } // Save secret - cfg := ini.Empty() - isFile, err := util.IsFile(CustomConf) - if err != nil { - log.Error("Unable to check if %s is a file. Error: %v", CustomConf, err) - } - if isFile { - // Keeps custom settings if there is already something. - if err := cfg.Append(CustomConf); err != nil { - log.Error("Failed to load custom conf '%s': %v", CustomConf, err) - } - } - - cfg.Section("server").Key("LFS_JWT_SECRET").SetValue(LFS.JWTSecretBase64) - - if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil { - log.Fatal("Failed to create '%s': %v", CustomConf, err) - } - if err := cfg.SaveTo(CustomConf); err != nil { - log.Fatal("Error saving generated JWT Secret to custom config: %v", err) - return - } - } - } -} - -// CheckLFSVersion will check lfs version, if not satisfied, then disable it. 
-func CheckLFSVersion() { - if LFS.StartServer { - //Disable LFS client hooks if installed for the current OS user - //Needs at least git v2.1.2 - - err := git.LoadGitVersion() - if err != nil { - log.Fatal("Error retrieving git version: %v", err) - } - - if git.CheckGitVersionAtLeast("2.1.2") != nil { - LFS.StartServer = false - log.Error("LFS server support needs at least Git v2.1.2") - } else { - git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "filter.lfs.required=", - "-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=") + CreateOrAppendToCustomConf(func(cfg *ini.File) { + cfg.Section("server").Key("LFS_JWT_SECRET").SetValue(LFS.JWTSecretBase64) + }) } } } diff --git a/modules/setting/log.go b/modules/setting/log.go index 44017b11383d..0fb108c93dcf 100644 --- a/modules/setting/log.go +++ b/modules/setting/log.go @@ -287,6 +287,7 @@ func newLogService() { options := newDefaultLogOptions() options.bufferLength = Cfg.Section("log").Key("BUFFER_LEN").MustInt64(10000) + EnableSSHLog = Cfg.Section("log").Key("ENABLE_SSH_LOG").MustBool(false) description := LogDescription{ Name: log.DEFAULT, diff --git a/modules/setting/markup.go b/modules/setting/markup.go index f0849a863a56..31ec1dd2eb6e 100644 --- a/modules/setting/markup.go +++ b/modules/setting/markup.go @@ -15,29 +15,34 @@ import ( // ExternalMarkupRenderers represents the external markup renderers var ( - ExternalMarkupRenderers []MarkupRenderer + ExternalMarkupRenderers []*MarkupRenderer ExternalSanitizerRules []MarkupSanitizerRule ) // MarkupRenderer defines the external parser configured in ini type MarkupRenderer struct { - Enabled bool - MarkupName string - Command string - FileExtensions []string - IsInputFile bool - NeedPostProcess bool + Enabled bool + MarkupName string + Command string + FileExtensions []string + IsInputFile bool + NeedPostProcess bool + MarkupSanitizerRules []MarkupSanitizerRule } // MarkupSanitizerRule defines the policy for whitelisting attributes on // certain elements. type MarkupSanitizerRule struct { - Element string - AllowAttr string - Regexp *regexp.Regexp + Element string + AllowAttr string + Regexp *regexp.Regexp + AllowDataURIImages bool } func newMarkup() { + ExternalMarkupRenderers = make([]*MarkupRenderer, 0, 10) + ExternalSanitizerRules = make([]MarkupSanitizerRule, 0, 10) + for _, sec := range Cfg.Section("markup").ChildSections() { name := strings.TrimPrefix(sec.Name(), "markup.") if name == "" { @@ -54,50 +59,62 @@ func newMarkup() { } func newMarkupSanitizer(name string, sec *ini.Section) { - haveElement := sec.HasKey("ELEMENT") - haveAttr := sec.HasKey("ALLOW_ATTR") - haveRegexp := sec.HasKey("REGEXP") - - if !haveElement && !haveAttr && !haveRegexp { - log.Warn("Skipping empty section: markup.%s.", name) - return + rule, ok := createMarkupSanitizerRule(name, sec) + if ok { + if strings.HasPrefix(name, "sanitizer.") { + names := strings.SplitN(strings.TrimPrefix(name, "sanitizer."), ".", 2) + name = names[0] + } + for _, renderer := range ExternalMarkupRenderers { + if name == renderer.MarkupName { + renderer.MarkupSanitizerRules = append(renderer.MarkupSanitizerRules, rule) + return + } + } + ExternalSanitizerRules = append(ExternalSanitizerRules, rule) } +} - if !haveElement || !haveAttr || !haveRegexp { - log.Error("Missing required keys from markup.%s. 
Must have all three of ELEMENT, ALLOW_ATTR, and REGEXP defined!", name) - return +func createMarkupSanitizerRule(name string, sec *ini.Section) (MarkupSanitizerRule, bool) { + var rule MarkupSanitizerRule + + ok := false + if sec.HasKey("ALLOW_DATA_URI_IMAGES") { + rule.AllowDataURIImages = sec.Key("ALLOW_DATA_URI_IMAGES").MustBool(false) + ok = true } - elements := sec.Key("ELEMENT").Value() - allowAttrs := sec.Key("ALLOW_ATTR").Value() - regexpStr := sec.Key("REGEXP").Value() + if sec.HasKey("ELEMENT") || sec.HasKey("ALLOW_ATTR") { + rule.Element = sec.Key("ELEMENT").Value() + rule.AllowAttr = sec.Key("ALLOW_ATTR").Value() - if regexpStr == "" { - rule := MarkupSanitizerRule{ - Element: elements, - AllowAttr: allowAttrs, - Regexp: nil, + if rule.Element == "" || rule.AllowAttr == "" { + log.Error("Missing required values from markup.%s. Must have ELEMENT and ALLOW_ATTR defined!", name) + return rule, false } - ExternalSanitizerRules = append(ExternalSanitizerRules, rule) - return - } + regexpStr := sec.Key("REGEXP").Value() + if regexpStr != "" { + // Validate when parsing the config that this is a valid regular + // expression. Then we can use regexp.MustCompile(...) later. + compiled, err := regexp.Compile(regexpStr) + if err != nil { + log.Error("In markup.%s: REGEXP (%s) failed to compile: %v", name, regexpStr, err) + return rule, false + } + + rule.Regexp = compiled + } - // Validate when parsing the config that this is a valid regular - // expression. Then we can use regexp.MustCompile(...) later. - compiled, err := regexp.Compile(regexpStr) - if err != nil { - log.Error("In module.%s: REGEXP (%s) at definition %d failed to compile: %v", regexpStr, name, err) - return + ok = true } - rule := MarkupSanitizerRule{ - Element: elements, - AllowAttr: allowAttrs, - Regexp: compiled, + if !ok { + log.Error("Missing required keys from markup.%s. Must have ELEMENT and ALLOW_ATTR or ALLOW_DATA_URI_IMAGES defined!", name) + return rule, false } - ExternalSanitizerRules = append(ExternalSanitizerRules, rule) + return rule, true } func newMarkupRenderer(name string, sec *ini.Section) { @@ -124,7 +141,7 @@ func newMarkupRenderer(name string, sec *ini.Section) { return } - ExternalMarkupRenderers = append(ExternalMarkupRenderers, MarkupRenderer{ + ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{ Enabled: sec.Key("ENABLED").MustBool(false), MarkupName: name, FileExtensions: exts, diff --git a/modules/setting/mime_type_map.go b/modules/setting/mime_type_map.go new file mode 100644 index 000000000000..5c1fc7f71a41 --- /dev/null +++ b/modules/setting/mime_type_map.go @@ -0,0 +1,31 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package setting + +import "strings" + +var ( + // MimeTypeMap defines custom mime type mapping settings + MimeTypeMap = struct { + Enabled bool + Map map[string]string + }{ + Enabled: false, + Map: map[string]string{}, + } +) + +func newMimeTypeMap() { + sec := Cfg.Section("repository.mimetype_mapping") + keys := sec.Keys() + m := make(map[string]string, len(keys)) + for _, key := range keys { + m[strings.ToLower(key.Name())] = key.Value() + } + MimeTypeMap.Map = m + if len(keys) > 0 { + MimeTypeMap.Enabled = true + } +} diff --git a/modules/setting/oauth2_client.go b/modules/setting/oauth2_client.go index a336563c9a45..6cfd99156f08 100644 --- a/modules/setting/oauth2_client.go +++ b/modules/setting/oauth2_client.go @@ -71,10 +71,10 @@ func newOAuth2Client() { OAuth2Client.Username = OAuth2UsernameNickname } OAuth2Client.UpdateAvatar = sec.Key("UPDATE_AVATAR").MustBool() - OAuth2Client.AccountLinking = OAuth2AccountLinkingType(sec.Key("ACCOUNT_LINKING").MustString(string(OAuth2AccountLinkingDisabled))) + OAuth2Client.AccountLinking = OAuth2AccountLinkingType(sec.Key("ACCOUNT_LINKING").MustString(string(OAuth2AccountLinkingLogin))) if !OAuth2Client.AccountLinking.isValid() { - log.Warn("Account linking setting is not valid: '%s', will fallback to '%s'", OAuth2Client.AccountLinking, OAuth2AccountLinkingDisabled) - OAuth2Client.AccountLinking = OAuth2AccountLinkingDisabled + log.Warn("Account linking setting is not valid: '%s', will fallback to '%s'", OAuth2Client.AccountLinking, OAuth2AccountLinkingLogin) + OAuth2Client.AccountLinking = OAuth2AccountLinkingLogin } } diff --git a/modules/setting/queue.go b/modules/setting/queue.go index 236560456299..4ff02b61ebcb 100644 --- a/modules/setting/queue.go +++ b/modules/setting/queue.go @@ -48,7 +48,7 @@ func GetQueueSettings(name string) QueueSettings { q.Name = name // DataDir is not directly inheritable - q.DataDir = filepath.Join(Queue.DataDir, name) + q.DataDir = filepath.ToSlash(filepath.Join(Queue.DataDir, "common")) // QueueName is not directly inheritable either q.QueueName = name + Queue.QueueName for _, key := range sec.Keys() { @@ -65,7 +65,7 @@ func GetQueueSettings(name string) QueueSettings { q.SetName = q.QueueName + Queue.SetName } if !filepath.IsAbs(q.DataDir) { - q.DataDir = filepath.Join(AppDataPath, q.DataDir) + q.DataDir = filepath.ToSlash(filepath.Join(AppDataPath, q.DataDir)) } _, _ = sec.NewKey("DATADIR", q.DataDir) @@ -91,23 +91,24 @@ func GetQueueSettings(name string) QueueSettings { // This is exported for tests to be able to use the queue func NewQueueService() { sec := Cfg.Section("queue") - Queue.DataDir = sec.Key("DATADIR").MustString("queues/") + Queue.DataDir = filepath.ToSlash(sec.Key("DATADIR").MustString("queues/")) if !filepath.IsAbs(Queue.DataDir) { - Queue.DataDir = filepath.Join(AppDataPath, Queue.DataDir) + Queue.DataDir = filepath.ToSlash(filepath.Join(AppDataPath, Queue.DataDir)) } Queue.QueueLength = sec.Key("LENGTH").MustInt(20) Queue.BatchLength = sec.Key("BATCH_LENGTH").MustInt(20) Queue.ConnectionString = sec.Key("CONN_STR").MustString("") + defaultType := sec.Key("TYPE").String() Queue.Type = sec.Key("TYPE").MustString("persistable-channel") Queue.Network, Queue.Addresses, Queue.Password, Queue.DBIndex, _ = ParseQueueConnStr(Queue.ConnectionString) Queue.WrapIfNecessary = sec.Key("WRAP_IF_NECESSARY").MustBool(true) Queue.MaxAttempts = sec.Key("MAX_ATTEMPTS").MustInt(10) Queue.Timeout = sec.Key("TIMEOUT").MustDuration(GracefulHammerTime + 30*time.Second) - Queue.Workers = 
sec.Key("WORKERS").MustInt(1) + Queue.Workers = sec.Key("WORKERS").MustInt(0) Queue.MaxWorkers = sec.Key("MAX_WORKERS").MustInt(10) Queue.BlockTimeout = sec.Key("BLOCK_TIMEOUT").MustDuration(1 * time.Second) Queue.BoostTimeout = sec.Key("BOOST_TIMEOUT").MustDuration(5 * time.Minute) - Queue.BoostWorkers = sec.Key("BOOST_WORKERS").MustInt(5) + Queue.BoostWorkers = sec.Key("BOOST_WORKERS").MustInt(1) Queue.QueueName = sec.Key("QUEUE_NAME").MustString("_queue") Queue.SetName = sec.Key("SET_NAME").MustString("") @@ -117,7 +118,7 @@ func NewQueueService() { for _, key := range section.Keys() { sectionMap[key.Name()] = true } - if _, ok := sectionMap["TYPE"]; !ok { + if _, ok := sectionMap["TYPE"]; !ok && defaultType == "" { switch Indexer.IssueQueueType { case LevelQueueType: _, _ = section.NewKey("TYPE", "level") @@ -125,21 +126,23 @@ func NewQueueService() { _, _ = section.NewKey("TYPE", "persistable-channel") case RedisQueueType: _, _ = section.NewKey("TYPE", "redis") + case "": + _, _ = section.NewKey("TYPE", "level") default: log.Fatal("Unsupported indexer queue type: %v", Indexer.IssueQueueType) } } - if _, ok := sectionMap["LENGTH"]; !ok { + if _, ok := sectionMap["LENGTH"]; !ok && Indexer.UpdateQueueLength != 0 { _, _ = section.NewKey("LENGTH", fmt.Sprintf("%d", Indexer.UpdateQueueLength)) } - if _, ok := sectionMap["BATCH_LENGTH"]; !ok { + if _, ok := sectionMap["BATCH_LENGTH"]; !ok && Indexer.IssueQueueBatchNumber != 0 { _, _ = section.NewKey("BATCH_LENGTH", fmt.Sprintf("%d", Indexer.IssueQueueBatchNumber)) } - if _, ok := sectionMap["DATADIR"]; !ok { + if _, ok := sectionMap["DATADIR"]; !ok && Indexer.IssueQueueDir != "" { _, _ = section.NewKey("DATADIR", Indexer.IssueQueueDir) } - if _, ok := sectionMap["CONN_STR"]; !ok { + if _, ok := sectionMap["CONN_STR"]; !ok && Indexer.IssueQueueConnStr != "" { _, _ = section.NewKey("CONN_STR", Indexer.IssueQueueConnStr) } diff --git a/modules/setting/repository.go b/modules/setting/repository.go index a6fc73651a30..c2a6357d9462 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -78,6 +78,7 @@ var ( DefaultMergeMessageAllAuthors bool DefaultMergeMessageMaxApprovers int DefaultMergeMessageOfficialApproversOnly bool + PopulateSquashCommentWithCommitMessages bool } `ini:"repository.pull-request"` // Issue Setting @@ -199,6 +200,7 @@ var ( DefaultMergeMessageAllAuthors bool DefaultMergeMessageMaxApprovers int DefaultMergeMessageOfficialApproversOnly bool + PopulateSquashCommentWithCommitMessages bool }{ WorkInProgressPrefixes: []string{"WIP:", "[WIP]"}, // Same as GitHub. 
See @@ -210,6 +212,7 @@ var ( DefaultMergeMessageAllAuthors: false, DefaultMergeMessageMaxApprovers: 10, DefaultMergeMessageOfficialApproversOnly: true, + PopulateSquashCommentWithCommitMessages: false, }, // Issue settings @@ -248,6 +251,10 @@ var ( } RepoRootPath string ScriptType = "bash" + + RepoArchive = struct { + Storage + }{} ) func newRepository() { @@ -325,4 +332,6 @@ func newRepository() { if !filepath.IsAbs(Repository.Upload.TempPath) { Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath) } + + RepoArchive.Storage = getStorage("repo-archive", "", nil) } diff --git a/modules/setting/service.go b/modules/setting/service.go index 9696e9864183..dbabfb8400ad 100644 --- a/modules/setting/service.go +++ b/modules/setting/service.go @@ -6,6 +6,7 @@ package setting import ( "regexp" + "strings" "time" "code.gitea.io/gitea/modules/log" @@ -13,7 +14,11 @@ import ( ) // Service settings -var Service struct { +var Service = struct { + DefaultUserVisibility string + DefaultUserVisibilityMode structs.VisibleType + AllowedUserVisibilityModes []string + AllowedUserVisibilityModesSlice AllowedVisibility `ini:"-"` DefaultOrgVisibility string DefaultOrgVisibilityMode structs.VisibleType ActiveCodeLives int @@ -23,6 +28,7 @@ var Service struct { EmailDomainWhitelist []string EmailDomainBlocklist []string DisableRegistration bool + AllowOnlyInternalRegistration bool AllowOnlyExternalRegistration bool ShowRegistrationButton bool ShowMilestonesDashboardPage bool @@ -54,6 +60,7 @@ var Service struct { AutoWatchOnChanges bool DefaultOrgMemberVisible bool UserDeleteWithCommentsMaxTime time.Duration + ValidSiteURLSchemes []string // OpenID settings EnableOpenIDSignIn bool @@ -66,6 +73,29 @@ var Service struct { RequireSigninView bool `ini:"REQUIRE_SIGNIN_VIEW"` DisableUsersPage bool `ini:"DISABLE_USERS_PAGE"` } `ini:"service.explore"` +}{ + AllowedUserVisibilityModesSlice: []bool{true, true, true}, +} + +// AllowedVisibility store in a 3 item bool array what is allowed +type AllowedVisibility []bool + +// IsAllowedVisibility check if a AllowedVisibility allow a specific VisibleType +func (a AllowedVisibility) IsAllowedVisibility(t structs.VisibleType) bool { + if int(t) >= len(a) { + return false + } + return a[t] +} + +// ToVisibleTypeSlice convert a AllowedVisibility into a VisibleType slice +func (a AllowedVisibility) ToVisibleTypeSlice() (result []structs.VisibleType) { + for i, v := range a { + if v { + result = append(result, structs.VisibleType(i)) + } + } + return } func newService() { @@ -73,7 +103,12 @@ func newService() { Service.ActiveCodeLives = sec.Key("ACTIVE_CODE_LIVE_MINUTES").MustInt(180) Service.ResetPwdCodeLives = sec.Key("RESET_PASSWD_CODE_LIVE_MINUTES").MustInt(180) Service.DisableRegistration = sec.Key("DISABLE_REGISTRATION").MustBool() + Service.AllowOnlyInternalRegistration = sec.Key("ALLOW_ONLY_INTERNAL_REGISTRATION").MustBool() Service.AllowOnlyExternalRegistration = sec.Key("ALLOW_ONLY_EXTERNAL_REGISTRATION").MustBool() + if Service.AllowOnlyExternalRegistration && Service.AllowOnlyInternalRegistration { + log.Warn("ALLOW_ONLY_INTERNAL_REGISTRATION and ALLOW_ONLY_EXTERNAL_REGISTRATION are true - disabling registration") + Service.DisableRegistration = true + } if !sec.Key("REGISTER_EMAIL_CONFIRM").MustBool() { Service.RegisterManualConfirm = sec.Key("REGISTER_MANUAL_CONFIRM").MustBool(false) } else { @@ -110,10 +145,29 @@ func newService() { Service.EnableUserHeatmap = sec.Key("ENABLE_USER_HEATMAP").MustBool(true) Service.AutoWatchNewRepos = 
sec.Key("AUTO_WATCH_NEW_REPOS").MustBool(true) Service.AutoWatchOnChanges = sec.Key("AUTO_WATCH_ON_CHANGES").MustBool(false) + Service.DefaultUserVisibility = sec.Key("DEFAULT_USER_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes)) + Service.DefaultUserVisibilityMode = structs.VisibilityModes[Service.DefaultUserVisibility] + Service.AllowedUserVisibilityModes = sec.Key("ALLOWED_USER_VISIBILITY_MODES").Strings(",") + if len(Service.AllowedUserVisibilityModes) != 0 { + Service.AllowedUserVisibilityModesSlice = []bool{false, false, false} + for _, sMode := range Service.AllowedUserVisibilityModes { + Service.AllowedUserVisibilityModesSlice[structs.VisibilityModes[sMode]] = true + } + } Service.DefaultOrgVisibility = sec.Key("DEFAULT_ORG_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes)) Service.DefaultOrgVisibilityMode = structs.VisibilityModes[Service.DefaultOrgVisibility] Service.DefaultOrgMemberVisible = sec.Key("DEFAULT_ORG_MEMBER_VISIBLE").MustBool() Service.UserDeleteWithCommentsMaxTime = sec.Key("USER_DELETE_WITH_COMMENTS_MAX_TIME").MustDuration(0) + sec.Key("VALID_SITE_URL_SCHEMES").MustString("http,https") + Service.ValidSiteURLSchemes = sec.Key("VALID_SITE_URL_SCHEMES").Strings(",") + schemes := make([]string, len(Service.ValidSiteURLSchemes)) + for _, scheme := range Service.ValidSiteURLSchemes { + scheme = strings.ToLower(strings.TrimSpace(scheme)) + if scheme != "" { + schemes = append(schemes, scheme) + } + } + Service.ValidSiteURLSchemes = schemes if err := Cfg.Section("service.explore").MapTo(&Service.Explore); err != nil { log.Fatal("Failed to map service.explore settings: %v", err) diff --git a/modules/setting/session.go b/modules/setting/session.go index 97666c5e53da..bce73b51da64 100644 --- a/modules/setting/session.go +++ b/modules/setting/session.go @@ -15,7 +15,7 @@ import ( ) var ( - // SessionConfig difines Session settings + // SessionConfig defines Session settings SessionConfig = struct { Provider string // Provider configuration, it's corresponding to provider. 
diff --git a/modules/setting/setting.go b/modules/setting/setting.go index aef0d867006b..e3da5796e426 100644 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -20,6 +20,7 @@ import ( "runtime" "strconv" "strings" + "text/template" "time" "code.gitea.io/gitea/modules/generate" @@ -117,48 +118,57 @@ var ( GracefulRestartable bool GracefulHammerTime time.Duration StartupTimeout time.Duration + PerWriteTimeout = 30 * time.Second + PerWritePerKbTimeout = 10 * time.Second StaticURLPrefix string AbsoluteAssetURL string SSH = struct { - Disabled bool `ini:"DISABLE_SSH"` - StartBuiltinServer bool `ini:"START_SSH_SERVER"` - BuiltinServerUser string `ini:"BUILTIN_SSH_SERVER_USER"` - Domain string `ini:"SSH_DOMAIN"` - Port int `ini:"SSH_PORT"` - ListenHost string `ini:"SSH_LISTEN_HOST"` - ListenPort int `ini:"SSH_LISTEN_PORT"` - RootPath string `ini:"SSH_ROOT_PATH"` - ServerCiphers []string `ini:"SSH_SERVER_CIPHERS"` - ServerKeyExchanges []string `ini:"SSH_SERVER_KEY_EXCHANGES"` - ServerMACs []string `ini:"SSH_SERVER_MACS"` - ServerHostKeys []string `ini:"SSH_SERVER_HOST_KEYS"` - KeyTestPath string `ini:"SSH_KEY_TEST_PATH"` - KeygenPath string `ini:"SSH_KEYGEN_PATH"` - AuthorizedKeysBackup bool `ini:"SSH_AUTHORIZED_KEYS_BACKUP"` - AuthorizedPrincipalsBackup bool `ini:"SSH_AUTHORIZED_PRINCIPALS_BACKUP"` - MinimumKeySizeCheck bool `ini:"-"` - MinimumKeySizes map[string]int `ini:"-"` - CreateAuthorizedKeysFile bool `ini:"SSH_CREATE_AUTHORIZED_KEYS_FILE"` - CreateAuthorizedPrincipalsFile bool `ini:"SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE"` - ExposeAnonymous bool `ini:"SSH_EXPOSE_ANONYMOUS"` - AuthorizedPrincipalsAllow []string `ini:"SSH_AUTHORIZED_PRINCIPALS_ALLOW"` - AuthorizedPrincipalsEnabled bool `ini:"-"` - TrustedUserCAKeys []string `ini:"SSH_TRUSTED_USER_CA_KEYS"` - TrustedUserCAKeysFile string `ini:"SSH_TRUSTED_USER_CA_KEYS_FILENAME"` - TrustedUserCAKeysParsed []gossh.PublicKey `ini:"-"` + Disabled bool `ini:"DISABLE_SSH"` + StartBuiltinServer bool `ini:"START_SSH_SERVER"` + BuiltinServerUser string `ini:"BUILTIN_SSH_SERVER_USER"` + Domain string `ini:"SSH_DOMAIN"` + Port int `ini:"SSH_PORT"` + ListenHost string `ini:"SSH_LISTEN_HOST"` + ListenPort int `ini:"SSH_LISTEN_PORT"` + RootPath string `ini:"SSH_ROOT_PATH"` + ServerCiphers []string `ini:"SSH_SERVER_CIPHERS"` + ServerKeyExchanges []string `ini:"SSH_SERVER_KEY_EXCHANGES"` + ServerMACs []string `ini:"SSH_SERVER_MACS"` + ServerHostKeys []string `ini:"SSH_SERVER_HOST_KEYS"` + KeyTestPath string `ini:"SSH_KEY_TEST_PATH"` + KeygenPath string `ini:"SSH_KEYGEN_PATH"` + AuthorizedKeysBackup bool `ini:"SSH_AUTHORIZED_KEYS_BACKUP"` + AuthorizedPrincipalsBackup bool `ini:"SSH_AUTHORIZED_PRINCIPALS_BACKUP"` + AuthorizedKeysCommandTemplate string `ini:"SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE"` + AuthorizedKeysCommandTemplateTemplate *template.Template `ini:"-"` + MinimumKeySizeCheck bool `ini:"-"` + MinimumKeySizes map[string]int `ini:"-"` + CreateAuthorizedKeysFile bool `ini:"SSH_CREATE_AUTHORIZED_KEYS_FILE"` + CreateAuthorizedPrincipalsFile bool `ini:"SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE"` + ExposeAnonymous bool `ini:"SSH_EXPOSE_ANONYMOUS"` + AuthorizedPrincipalsAllow []string `ini:"SSH_AUTHORIZED_PRINCIPALS_ALLOW"` + AuthorizedPrincipalsEnabled bool `ini:"-"` + TrustedUserCAKeys []string `ini:"SSH_TRUSTED_USER_CA_KEYS"` + TrustedUserCAKeysFile string `ini:"SSH_TRUSTED_USER_CA_KEYS_FILENAME"` + TrustedUserCAKeysParsed []gossh.PublicKey `ini:"-"` + PerWriteTimeout time.Duration `ini:"SSH_PER_WRITE_TIMEOUT"` + PerWritePerKbTimeout 
time.Duration `ini:"SSH_PER_WRITE_PER_KB_TIMEOUT"` }{ - Disabled: false, - StartBuiltinServer: false, - Domain: "", - Port: 22, - ServerCiphers: []string{"aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", "arcfour256", "arcfour128"}, - ServerKeyExchanges: []string{"diffie-hellman-group1-sha1", "diffie-hellman-group14-sha1", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "curve25519-sha256@libssh.org"}, - ServerMACs: []string{"hmac-sha2-256-etm@openssh.com", "hmac-sha2-256", "hmac-sha1", "hmac-sha1-96"}, - KeygenPath: "ssh-keygen", - MinimumKeySizeCheck: true, - MinimumKeySizes: map[string]int{"ed25519": 256, "ed25519-sk": 256, "ecdsa": 256, "ecdsa-sk": 256, "rsa": 2048}, - ServerHostKeys: []string{"ssh/gitea.rsa", "ssh/gogs.rsa"}, + Disabled: false, + StartBuiltinServer: false, + Domain: "", + Port: 22, + ServerCiphers: []string{"aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", "arcfour256", "arcfour128"}, + ServerKeyExchanges: []string{"diffie-hellman-group1-sha1", "diffie-hellman-group14-sha1", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "curve25519-sha256@libssh.org"}, + ServerMACs: []string{"hmac-sha2-256-etm@openssh.com", "hmac-sha2-256", "hmac-sha1", "hmac-sha1-96"}, + KeygenPath: "ssh-keygen", + MinimumKeySizeCheck: true, + MinimumKeySizes: map[string]int{"ed25519": 256, "ed25519-sk": 256, "ecdsa": 256, "ecdsa-sk": 256, "rsa": 2048}, + ServerHostKeys: []string{"ssh/gitea.rsa", "ssh/gogs.rsa"}, + AuthorizedKeysCommandTemplate: "{{.AppPath}} --config={{.CustomConf}} serv key-{{.Key.ID}}", + PerWriteTimeout: PerWriteTimeout, + PerWritePerKbTimeout: PerWritePerKbTimeout, } // Security settings @@ -198,7 +208,9 @@ var ( DefaultTheme string Themes []string Reactions []string - ReactionsMap map[string]bool + ReactionsMap map[string]bool `ini:"-"` + CustomEmojis []string + CustomEmojisMap map[string]string `ini:"-"` SearchRepoDescription bool UseServiceWorker bool @@ -246,6 +258,8 @@ var ( DefaultTheme: `gitea`, Themes: []string{`gitea`, `arc-green`}, Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`}, + CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`}, + CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:"}, Notification: struct { MinTimeout time.Duration TimeoutStep time.Duration @@ -319,6 +333,7 @@ var ( DisableRouterLog bool RouterLogLevel log.Level EnableAccessLog bool + EnableSSHLog bool AccessLogTemplate string EnableXORMLog bool @@ -360,14 +375,17 @@ var ( AccessTokenExpirationTime int64 RefreshTokenExpirationTime int64 InvalidateRefreshTokens bool - JWTSecretBytes []byte `ini:"-"` + JWTSigningAlgorithm string `ini:"JWT_SIGNING_ALGORITHM"` JWTSecretBase64 string `ini:"JWT_SECRET"` + JWTSigningPrivateKeyFile string `ini:"JWT_SIGNING_PRIVATE_KEY_FILE"` MaxTokenLength int }{ Enable: true, AccessTokenExpirationTime: 3600, RefreshTokenExpirationTime: 730, InvalidateRefreshTokens: false, + JWTSigningAlgorithm: "RS256", + JWTSigningPrivateKeyFile: "jwt/private.pem", MaxTokenLength: math.MaxInt16, } @@ -455,7 +473,8 @@ func getWorkPath(appPath string) string { func init() { IsWindows = runtime.GOOS == "windows" // We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically - log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": 
%t, "stacktraceLevel": "none"}`, log.CanColorStdout)) + // By default set this logger at Info - we'll change it later but we need to start with something. + log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "info", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout)) var err error if AppPath, err = getAppPath(); err != nil { @@ -611,6 +630,8 @@ func NewContext() { GracefulRestartable = sec.Key("ALLOW_GRACEFUL_RESTARTS").MustBool(true) GracefulHammerTime = sec.Key("GRACEFUL_HAMMER_TIME").MustDuration(60 * time.Second) StartupTimeout = sec.Key("STARTUP_TIMEOUT").MustDuration(0 * time.Second) + PerWriteTimeout = sec.Key("PER_WRITE_TIMEOUT").MustDuration(PerWriteTimeout) + PerWritePerKbTimeout = sec.Key("PER_WRITE_PER_KB_TIMEOUT").MustDuration(PerWritePerKbTimeout) defaultAppURL := string(Protocol) + "://" + Domain if (Protocol == HTTP && HTTPPort != "80") || (Protocol == HTTPS && HTTPPort != "443") { @@ -776,44 +797,20 @@ func NewContext() { } SSH.ExposeAnonymous = sec.Key("SSH_EXPOSE_ANONYMOUS").MustBool(false) + SSH.AuthorizedKeysCommandTemplate = sec.Key("SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE").MustString(SSH.AuthorizedKeysCommandTemplate) + + SSH.AuthorizedKeysCommandTemplateTemplate = template.Must(template.New("").Parse(SSH.AuthorizedKeysCommandTemplate)) + + SSH.PerWriteTimeout = sec.Key("SSH_PER_WRITE_TIMEOUT").MustDuration(PerWriteTimeout) + SSH.PerWritePerKbTimeout = sec.Key("SSH_PER_WRITE_PER_KB_TIMEOUT").MustDuration(PerWritePerKbTimeout) if err = Cfg.Section("oauth2").MapTo(&OAuth2); err != nil { log.Fatal("Failed to OAuth2 settings: %v", err) return } - if OAuth2.Enable { - OAuth2.JWTSecretBytes = make([]byte, 32) - n, err := base64.RawURLEncoding.Decode(OAuth2.JWTSecretBytes, []byte(OAuth2.JWTSecretBase64)) - - if err != nil || n != 32 { - OAuth2.JWTSecretBase64, err = generate.NewJwtSecret() - if err != nil { - log.Fatal("error generating JWT secret: %v", err) - return - } - cfg := ini.Empty() - isFile, err := util.IsFile(CustomConf) - if err != nil { - log.Error("Unable to check if %s is a file. Error: %v", CustomConf, err) - } - if isFile { - if err := cfg.Append(CustomConf); err != nil { - log.Error("failed to load custom conf %s: %v", CustomConf, err) - return - } - } - cfg.Section("oauth2").Key("JWT_SECRET").SetValue(OAuth2.JWTSecretBase64) - - if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil { - log.Fatal("failed to create '%s': %v", CustomConf, err) - return - } - if err := cfg.SaveTo(CustomConf); err != nil { - log.Fatal("error saving generating JWT secret to custom config: %v", err) - return - } - } + if !filepath.IsAbs(OAuth2.JWTSigningPrivateKeyFile) { + OAuth2.JWTSigningPrivateKeyFile = filepath.Join(AppDataPath, OAuth2.JWTSigningPrivateKeyFile) } sec = Cfg.Section("admin") @@ -990,6 +987,10 @@ func NewContext() { for _, reaction := range UI.Reactions { UI.ReactionsMap[reaction] = true } + UI.CustomEmojisMap = make(map[string]string) + for _, emoji := range UI.CustomEmojis { + UI.CustomEmojisMap[emoji] = ":" + emoji + ":" + } } func parseAuthorizedPrincipalsAllow(values []string) ([]string, bool) { @@ -1074,26 +1075,9 @@ func loadOrGenerateInternalToken(sec *ini.Section) string { } // Save secret - cfgSave := ini.Empty() - isFile, err := util.IsFile(CustomConf) - if err != nil { - log.Error("Unable to check if %s is a file. Error: %v", CustomConf, err) - } - if isFile { - // Keeps custom settings if there is already something. 
- if err := cfgSave.Append(CustomConf); err != nil { - log.Error("Failed to load custom conf '%s': %v", CustomConf, err) - } - } - - cfgSave.Section("security").Key("INTERNAL_TOKEN").SetValue(token) - - if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil { - log.Fatal("Failed to create '%s': %v", CustomConf, err) - } - if err := cfgSave.SaveTo(CustomConf); err != nil { - log.Fatal("Error saving generated INTERNAL_TOKEN to custom config: %v", err) - } + CreateOrAppendToCustomConf(func(cfg *ini.File) { + cfg.Section("security").Key("INTERNAL_TOKEN").SetValue(token) + }) } return token } @@ -1159,6 +1143,45 @@ func MakeManifestData(appName string, appURL string, absoluteAssetURL string) [] return bytes } +// CreateOrAppendToCustomConf creates or updates the custom config. +// Use the callback to set individual values. +func CreateOrAppendToCustomConf(callback func(cfg *ini.File)) { + cfg := ini.Empty() + isFile, err := util.IsFile(CustomConf) + if err != nil { + log.Error("Unable to check if %s is a file. Error: %v", CustomConf, err) + } + if isFile { + if err := cfg.Append(CustomConf); err != nil { + log.Error("failed to load custom conf %s: %v", CustomConf, err) + return + } + } + + callback(cfg) + + if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil { + log.Fatal("failed to create '%s': %v", CustomConf, err) + return + } + if err := cfg.SaveTo(CustomConf); err != nil { + log.Fatal("error saving to custom config: %v", err) + } + + // Change permissions to be more restrictive + fi, err := os.Stat(CustomConf) + if err != nil { + log.Error("Failed to determine current conf file permissions: %v", err) + return + } + + if fi.Mode().Perm() > 0o600 { + if err = os.Chmod(CustomConf, 0o600); err != nil { + log.Warn("Failed changing conf file permissions to -rw-------. Consider changing them manually.") + } + } +} + // NewServices initializes the services func NewServices() { InitDBConfig() @@ -1177,4 +1200,11 @@ func NewServices() { newTaskService() NewQueueService() newProject() + newMimeTypeMap() +} + +// NewServicesForInstall initializes the services for install +func NewServicesForInstall() { + newService() + newMailService() } diff --git a/modules/setting/storage.go b/modules/setting/storage.go index 3ab08d8d2a02..075152db59cd 100644 --- a/modules/setting/storage.go +++ b/modules/setting/storage.go @@ -43,6 +43,10 @@ func getStorage(name, typ string, targetSec *ini.Section) Storage { sec.Key("MINIO_LOCATION").MustString("us-east-1") sec.Key("MINIO_USE_SSL").MustBool(false) + if targetSec == nil { + targetSec, _ = Cfg.NewSection(name) + } + var storage Storage storage.Section = targetSec storage.Type = typ diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index 22683b003be1..efe952534551 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -6,12 +6,14 @@ package ssh import ( "bytes" + "context" "crypto/rand" "crypto/rsa" "crypto/x509" "encoding/pem" "fmt" "io" + "net" "os" "os/exec" "path/filepath" @@ -65,7 +67,11 @@ func sessionHandler(session ssh.Session) { args := []string{"serv", "key-" + keyID, "--config=" + setting.CustomConf} log.Trace("SSH: Arguments: %v", args) - cmd := exec.Command(setting.AppPath, args...) + + ctx, cancel := context.WithCancel(session.Context()) + defer cancel() + + cmd := exec.CommandContext(ctx, setting.AppPath, args...) 
cmd.Env = append( os.Environ(), "SSH_ORIGINAL_COMMAND="+command, @@ -77,16 +83,21 @@ func sessionHandler(session ssh.Session) { log.Error("SSH: StdoutPipe: %v", err) return } + defer stdout.Close() + stderr, err := cmd.StderrPipe() if err != nil { log.Error("SSH: StderrPipe: %v", err) return } + defer stderr.Close() + stdin, err := cmd.StdinPipe() if err != nil { log.Error("SSH: StdinPipe: %v", err) return } + defer stdin.Close() wg := &sync.WaitGroup{} wg.Add(2) @@ -105,6 +116,7 @@ func sessionHandler(session ssh.Session) { go func() { defer wg.Done() + defer stdout.Close() if _, err := io.Copy(session, stdout); err != nil { log.Error("Failed to write stdout to session. %s", err) } @@ -112,6 +124,7 @@ func sessionHandler(session ssh.Session) { go func() { defer wg.Done() + defer stderr.Close() if _, err := io.Copy(session.Stderr(), stderr); err != nil { log.Error("Failed to write stderr to session. %s", err) } @@ -239,6 +252,15 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool { return true } +// sshConnectionFailed logs a failed connection +// - this mainly exists to give a nice function name in logging +func sshConnectionFailed(conn net.Conn, err error) { + // Log the underlying error with a specific message + log.Warn("Failed connection from %s with error: %v", conn.RemoteAddr(), err) + // Log with the standard failed authentication from message for simpler fail2ban configuration + log.Warn("Failed authentication attempt from %s", conn.RemoteAddr()) +} + // Listen starts a SSH server listens on given port. func Listen(host string, port int, ciphers []string, keyExchanges []string, macs []string) { srv := ssh.Server{ @@ -252,6 +274,7 @@ func Listen(host string, port int, ciphers []string, keyExchanges []string, macs config.Ciphers = ciphers return config }, + ConnectionFailedCallback: sshConnectionFailed, // We need to explicitly disable the PtyCallback so text displays // properly. 
PtyCallback: func(ctx ssh.Context, pty ssh.Pty) bool { diff --git a/modules/ssh/ssh_graceful.go b/modules/ssh/ssh_graceful.go index c213aa7b88a9..08a7c857529a 100644 --- a/modules/ssh/ssh_graceful.go +++ b/modules/ssh/ssh_graceful.go @@ -7,12 +7,15 @@ package ssh import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "github.com/gliderlabs/ssh" ) func listen(server *ssh.Server) { gracefulServer := graceful.NewServer("tcp", server.Addr, "SSH") + gracefulServer.PerWriteTimeout = setting.SSH.PerWriteTimeout + gracefulServer.PerWritePerKbTimeout = setting.SSH.PerWritePerKbTimeout err := gracefulServer.ListenAndServe(server.Serve) if err != nil { diff --git a/modules/storage/local.go b/modules/storage/local.go index 46e5d60e6b5d..1329f722c285 100644 --- a/modules/storage/local.go +++ b/modules/storage/local.go @@ -96,7 +96,7 @@ func (l *LocalStorage) Save(path string, r io.Reader, size int64) (int64, error) return 0, err } - if err := os.Rename(tmp.Name(), p); err != nil { + if err := util.Rename(tmp.Name(), p); err != nil { return 0, err } diff --git a/modules/storage/storage.go b/modules/storage/storage.go index 984f154db48b..9f87e58b60d4 100644 --- a/modules/storage/storage.go +++ b/modules/storage/storage.go @@ -71,7 +71,7 @@ type ObjectStorage interface { IterateObjects(func(path string, obj Object) error) error } -// Copy copys a file from source ObjectStorage to dest ObjectStorage +// Copy copies a file from source ObjectStorage to dest ObjectStorage func Copy(dstStorage ObjectStorage, dstPath string, srcStorage ObjectStorage, srcPath string) (int64, error) { f, err := srcStorage.Open(srcPath) if err != nil { @@ -88,6 +88,13 @@ func Copy(dstStorage ObjectStorage, dstPath string, srcStorage ObjectStorage, sr return dstStorage.Save(dstPath, f, size) } +// Clean delete all the objects in this storage +func Clean(storage ObjectStorage) error { + return storage.IterateObjects(func(path string, obj Object) error { + return storage.Delete(path) + }) +} + // SaveFrom saves data to the ObjectStorage with path p from the callback func SaveFrom(objStorage ObjectStorage, p string, callback func(w io.Writer) error) error { pr, pw := io.Pipe() @@ -114,6 +121,9 @@ var ( Avatars ObjectStorage // RepoAvatars represents repository avatars storage RepoAvatars ObjectStorage + + // RepoArchives represents repository archives storage + RepoArchives ObjectStorage ) // Init init the stoarge @@ -130,7 +140,11 @@ func Init() error { return err } - return initLFS() + if err := initLFS(); err != nil { + return err + } + + return initRepoArchives() } // NewStorage takes a storage type and some config and returns an ObjectStorage or an error @@ -169,3 +183,9 @@ func initRepoAvatars() (err error) { RepoAvatars, err = NewStorage(setting.RepoAvatar.Storage.Type, &setting.RepoAvatar.Storage) return } + +func initRepoArchives() (err error) { + log.Info("Initialising Repository Archive storage with type: %s", setting.RepoArchive.Storage.Type) + RepoArchives, err = NewStorage(setting.RepoArchive.Storage.Type, &setting.RepoArchive.Storage) + return +} diff --git a/modules/structs/admin_user.go b/modules/structs/admin_user.go index 5da4e9608bea..facf16a39552 100644 --- a/modules/structs/admin_user.go +++ b/modules/structs/admin_user.go @@ -19,6 +19,7 @@ type CreateUserOption struct { Password string `json:"password" binding:"Required;MaxSize(255)"` MustChangePassword *bool `json:"must_change_password"` SendNotify bool `json:"send_notify"` + Visibility string 
`json:"visibility" binding:"In(,public,limited,private)"` } // EditUserOption edit user options @@ -43,4 +44,5 @@ type EditUserOption struct { ProhibitLogin *bool `json:"prohibit_login"` AllowCreateOrganization *bool `json:"allow_create_organization"` Restricted *bool `json:"restricted"` + Visibility string `json:"visibility" binding:"In(,public,limited,private)"` } diff --git a/modules/structs/hook.go b/modules/structs/hook.go index 693820b57d3e..e4ec99df40d7 100644 --- a/modules/structs/hook.go +++ b/modules/structs/hook.go @@ -62,7 +62,6 @@ type EditHookOption struct { // Payloader payload is some part of one hook type Payloader interface { - SetSecret(string) JSONPayload() ([]byte, error) } @@ -124,7 +123,6 @@ var ( // CreatePayload FIXME type CreatePayload struct { - Secret string `json:"secret"` Sha string `json:"sha"` Ref string `json:"ref"` RefType string `json:"ref_type"` @@ -132,11 +130,6 @@ type CreatePayload struct { Sender *User `json:"sender"` } -// SetSecret modifies the secret of the CreatePayload -func (p *CreatePayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload return payload information func (p *CreatePayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -181,7 +174,6 @@ const ( // DeletePayload represents delete payload type DeletePayload struct { - Secret string `json:"secret"` Ref string `json:"ref"` RefType string `json:"ref_type"` PusherType PusherType `json:"pusher_type"` @@ -189,11 +181,6 @@ type DeletePayload struct { Sender *User `json:"sender"` } -// SetSecret modifies the secret of the DeletePayload -func (p *DeletePayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload implements Payload func (p *DeletePayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -209,17 +196,11 @@ func (p *DeletePayload) JSONPayload() ([]byte, error) { // ForkPayload represents fork payload type ForkPayload struct { - Secret string `json:"secret"` Forkee *Repository `json:"forkee"` Repo *Repository `json:"repository"` Sender *User `json:"sender"` } -// SetSecret modifies the secret of the ForkPayload -func (p *ForkPayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload implements Payload func (p *ForkPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -238,7 +219,6 @@ const ( // IssueCommentPayload represents a payload information of issue comment event. type IssueCommentPayload struct { - Secret string `json:"secret"` Action HookIssueCommentAction `json:"action"` Issue *Issue `json:"issue"` Comment *Comment `json:"comment"` @@ -248,11 +228,6 @@ type IssueCommentPayload struct { IsPull bool `json:"is_pull"` } -// SetSecret modifies the secret of the IssueCommentPayload -func (p *IssueCommentPayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload implements Payload func (p *IssueCommentPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -278,18 +253,12 @@ const ( // ReleasePayload represents a payload information of release event. 
type ReleasePayload struct { - Secret string `json:"secret"` Action HookReleaseAction `json:"action"` Release *Release `json:"release"` Repository *Repository `json:"repository"` Sender *User `json:"sender"` } -// SetSecret modifies the secret of the ReleasePayload -func (p *ReleasePayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload implements Payload func (p *ReleasePayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -305,7 +274,6 @@ func (p *ReleasePayload) JSONPayload() ([]byte, error) { // PushPayload represents a payload information of push event. type PushPayload struct { - Secret string `json:"secret"` Ref string `json:"ref"` Before string `json:"before"` After string `json:"after"` @@ -317,11 +285,6 @@ type PushPayload struct { Sender *User `json:"sender"` } -// SetSecret modifies the secret of the PushPayload -func (p *PushPayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload FIXME func (p *PushPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -389,7 +352,6 @@ const ( // IssuePayload represents the payload information that is sent along with an issue event. type IssuePayload struct { - Secret string `json:"secret"` Action HookIssueAction `json:"action"` Index int64 `json:"number"` Changes *ChangesPayload `json:"changes,omitempty"` @@ -398,11 +360,6 @@ type IssuePayload struct { Sender *User `json:"sender"` } -// SetSecret modifies the secret of the IssuePayload. -func (p *IssuePayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload encodes the IssuePayload to JSON, with an indentation of two spaces. func (p *IssuePayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -430,7 +387,6 @@ type ChangesPayload struct { // PullRequestPayload represents a payload information of pull request event. type PullRequestPayload struct { - Secret string `json:"secret"` Action HookIssueAction `json:"action"` Index int64 `json:"number"` Changes *ChangesPayload `json:"changes,omitempty"` @@ -440,11 +396,6 @@ type PullRequestPayload struct { Review *ReviewPayload `json:"review"` } -// SetSecret modifies the secret of the PullRequestPayload. 
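Since the Secret field is gone from every payload body, a receiver can no longer read the shared secret out of the delivered JSON; the delivery is authenticated out of band instead, commonly with an HMAC-SHA256 signature of the raw request body carried in a header (assumed here to be X-Gitea-Signature). A hedged receiver-side sketch with a placeholder secret:

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// verifySignature checks an HMAC-SHA256 signature over the raw payload body.
// The secret now lives only in the webhook configuration, not in the payload.
func verifySignature(secret, body []byte, gotHex string) bool {
	mac := hmac.New(sha256.New, secret)
	mac.Write(body) // hash.Hash.Write never returns an error
	want := mac.Sum(nil)
	got, err := hex.DecodeString(gotHex)
	if err != nil {
		return false
	}
	return hmac.Equal(want, got)
}

func main() {
	secret := []byte("example-webhook-secret") // placeholder value
	body := []byte(`{"ref":"refs/heads/main"}`)

	// What a sender would place in the signature header.
	mac := hmac.New(sha256.New, secret)
	mac.Write(body)
	sig := hex.EncodeToString(mac.Sum(nil))

	fmt.Println(verifySignature(secret, body, sig)) // true
}
```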
-func (p *PullRequestPayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload FIXME func (p *PullRequestPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -476,18 +427,12 @@ const ( // RepositoryPayload payload for repository webhooks type RepositoryPayload struct { - Secret string `json:"secret"` Action HookRepoAction `json:"action"` Repository *Repository `json:"repository"` Organization *User `json:"organization"` Sender *User `json:"sender"` } -// SetSecret modifies the secret of the RepositoryPayload -func (p *RepositoryPayload) SetSecret(secret string) { - p.Secret = secret -} - // JSONPayload JSON representation of the payload func (p *RepositoryPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/modules/structs/notifications.go b/modules/structs/notifications.go index 8daa6de1686f..675dcf76b127 100644 --- a/modules/structs/notifications.go +++ b/modules/structs/notifications.go @@ -21,14 +21,28 @@ type NotificationThread struct { // NotificationSubject contains the notification subject (Issue/Pull/Commit) type NotificationSubject struct { - Title string `json:"title"` - URL string `json:"url"` - LatestCommentURL string `json:"latest_comment_url"` - Type string `json:"type" binding:"In(Issue,Pull,Commit)"` - State StateType `json:"state"` + Title string `json:"title"` + URL string `json:"url"` + LatestCommentURL string `json:"latest_comment_url"` + Type NotifySubjectType `json:"type" binding:"In(Issue,Pull,Commit)"` + State StateType `json:"state"` } // NotificationCount number of unread notifications type NotificationCount struct { New int64 `json:"new"` } + +// NotifySubjectType represent type of notification subject +type NotifySubjectType string + +const ( + // NotifySubjectIssue an issue is subject of an notification + NotifySubjectIssue NotifySubjectType = "Issue" + // NotifySubjectPull an pull is subject of an notification + NotifySubjectPull NotifySubjectType = "Pull" + // NotifySubjectCommit an commit is subject of an notification + NotifySubjectCommit NotifySubjectType = "Commit" + // NotifySubjectRepository an repository is subject of an notification + NotifySubjectRepository NotifySubjectType = "Repository" +) diff --git a/modules/structs/org.go b/modules/structs/org.go index 483f5044a898..38c6c6d6d849 100644 --- a/modules/structs/org.go +++ b/modules/structs/org.go @@ -31,6 +31,8 @@ type CreateOrgOption struct { RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` } +// TODO: make EditOrgOption fields optional after https://gitea.com/go-chi/binding/pulls/5 got merged + // EditOrgOption options for editing an organization type EditOrgOption struct { FullName string `json:"full_name"` @@ -40,5 +42,5 @@ type EditOrgOption struct { // possible values are `public`, `limited` or `private` // enum: public,limited,private Visibility string `json:"visibility" binding:"In(,public,limited,private)"` - RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` + RepoAdminChangeTeamAccess *bool `json:"repo_admin_change_team_access"` } diff --git a/modules/structs/repo.go b/modules/structs/repo.go index 4fdc1e54cb28..2089f4d69cd0 100644 --- a/modules/structs/repo.go +++ b/modules/structs/repo.go @@ -172,6 +172,8 @@ type EditRepoOption struct { AllowManualMerge *bool `json:"allow_manual_merge,omitempty"` // either `true` to enable AutodetectManualMerge, or `false` to prevent it. 
`has_pull_requests` must be `true`, Note: In some special cases, misjudgments can occur. AutodetectManualMerge *bool `json:"autodetect_manual_merge,omitempty"` + // set to `true` to delete pr branch after merge by default + DefaultDeleteBranchAfterMerge *bool `json:"default_delete_branch_after_merge,omitempty"` // set to a merge style to be used by this repository: "merge", "rebase", "rebase-merge", or "squash". `has_pull_requests` must be `true`. DefaultMergeStyle *string `json:"default_merge_style,omitempty"` // set to `true` to archive this repository. @@ -180,6 +182,36 @@ type EditRepoOption struct { MirrorInterval *string `json:"mirror_interval,omitempty"` } +// GenerateRepoOption options when creating repository using a template +// swagger:model +type GenerateRepoOption struct { + // The organization or person who will own the new repository + // + // required: true + Owner string `json:"owner"` + // Name of the repository to create + // + // required: true + // unique: true + Name string `json:"name" binding:"Required;AlphaDashDot;MaxSize(100)"` + // Description of the repository to create + Description string `json:"description" binding:"MaxSize(255)"` + // Whether the repository is private + Private bool `json:"private"` + // include git content of default branch in template repo + GitContent bool `json:"git_content"` + // include topics in template repo + Topics bool `json:"topics"` + // include git hooks in template repo + GitHooks bool `json:"git_hooks"` + // include webhooks in template repo + Webhooks bool `json:"webhooks"` + // include avatar of the template repo + Avatar bool `json:"avatar"` + // include labels in template repo + Labels bool `json:"labels"` +} + // CreateBranchRepoOption options when creating a branch in a repository // swagger:model type CreateBranchRepoOption struct { diff --git a/modules/structs/repo_tag.go b/modules/structs/repo_tag.go index b62395cac498..80ee1ccf1774 100644 --- a/modules/structs/repo_tag.go +++ b/modules/structs/repo_tag.go @@ -7,6 +7,7 @@ package structs // Tag represents a repository tag type Tag struct { Name string `json:"name"` + Message string `json:"message"` ID string `json:"id"` Commit *CommitMeta `json:"commit"` ZipballURL string `json:"zipball_url"` @@ -30,3 +31,11 @@ type AnnotatedTagObject struct { URL string `json:"url"` SHA string `json:"sha"` } + +// CreateTagOption options when creating a tag +type CreateTagOption struct { + // required: true + TagName string `json:"tag_name" binding:"Required"` + Message string `json:"message"` + Target string `json:"target"` +} diff --git a/modules/structs/settings.go b/modules/structs/settings.go index 842b12792d1d..90c4a2107bbd 100644 --- a/modules/structs/settings.go +++ b/modules/structs/settings.go @@ -18,6 +18,7 @@ type GeneralRepoSettings struct { type GeneralUISettings struct { DefaultTheme string `json:"default_theme"` AllowedReactions []string `json:"allowed_reactions"` + CustomEmojis []string `json:"custom_emojis"` } // GeneralAPISettings contains global api settings exposed by it diff --git a/modules/structs/user.go b/modules/structs/user.go index b5d5b9298ef0..a3c8f0c32a7b 100644 --- a/modules/structs/user.go +++ b/modules/structs/user.go @@ -33,12 +33,23 @@ type User struct { Created time.Time `json:"created,omitempty"` // Is user restricted Restricted bool `json:"restricted"` + // Is user active + IsActive bool `json:"active"` + // Is user login prohibited + ProhibitLogin bool `json:"prohibit_login"` // the user's location Location string `json:"location"` // the 
user's website Website string `json:"website"` // the user's description Description string `json:"description"` + // User visibility level option: public, limited, private + Visibility string `json:"visibility"` + + // user counts + Followers int `json:"followers_count"` + Following int `json:"following_count"` + StarredRepos int `json:"starred_repos_count"` } // MarshalJSON implements the json.Marshaler interface for User, adding field(s) for backward compatibility @@ -51,3 +62,33 @@ func (u User) MarshalJSON() ([]byte, error) { CompatUserName string `json:"username"` }{shadow(u), u.UserName}) } + +// UserSettings represents user settings +// swagger:model +type UserSettings struct { + FullName string `json:"full_name"` + Website string `json:"website"` + Description string `json:"description"` + Location string `json:"location"` + Language string `json:"language"` + Theme string `json:"theme"` + DiffViewStyle string `json:"diff_view_style"` + // Privacy + HideEmail bool `json:"hide_email"` + HideActivity bool `json:"hide_activity"` +} + +// UserSettingsOptions represents options to change user settings +// swagger:model +type UserSettingsOptions struct { + FullName *string `json:"full_name" binding:"MaxSize(100)"` + Website *string `json:"website" binding:"OmitEmpty;ValidUrl;MaxSize(255)"` + Description *string `json:"description" binding:"MaxSize(255)"` + Location *string `json:"location" binding:"MaxSize(50)"` + Language *string `json:"language"` + Theme *string `json:"theme"` + DiffViewStyle *string `json:"diff_view_style"` + // Privacy + HideEmail *bool `json:"hide_email"` + HideActivity *bool `json:"hide_activity"` +} diff --git a/modules/structs/user_gpgkey.go b/modules/structs/user_gpgkey.go index f501a09cb92d..a2ebf7df93b5 100644 --- a/modules/structs/user_gpgkey.go +++ b/modules/structs/user_gpgkey.go @@ -20,6 +20,7 @@ type GPGKey struct { CanEncryptComms bool `json:"can_encrypt_comms"` CanEncryptStorage bool `json:"can_encrypt_storage"` CanCertify bool `json:"can_certify"` + Verified bool `json:"verified"` // swagger:strfmt date-time Created time.Time `json:"created_at,omitempty"` // swagger:strfmt date-time @@ -40,4 +41,14 @@ type CreateGPGKeyOption struct { // required: true // unique: true ArmoredKey string `json:"armored_public_key" binding:"Required"` + Signature string `json:"armored_signature,omitempty"` +} + +// VerifyGPGKeyOption options verifies user GPG key +type VerifyGPGKeyOption struct { + // An Signature for a GPG key token + // + // required: true + KeyID string `json:"key_id" binding:"Required"` + Signature string `json:"armored_signature" binding:"Required"` } diff --git a/modules/task/migrate.go b/modules/task/migrate.go index 57424abac38c..d7655112d351 100644 --- a/modules/task/migrate.go +++ b/modules/task/migrate.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" + jsoniter "github.com/json-iterator/go" ) func handleCreateError(owner *models.User, err error) error { @@ -56,7 +57,7 @@ func runMigrateTask(t *models.Task) (err error) { t.EndTime = timeutil.TimeStampNow() t.Status = structs.TaskStatusFailed - t.Errors = err.Error() + t.Message = err.Error() t.RepoID = 0 if err := t.UpdateCols("status", "errors", "repo_id", "end_time"); err != nil { log.Error("Task UpdateCols failed: %v", err) @@ -73,7 +74,7 @@ func runMigrateTask(t *models.Task) (err error) { return } - // if repository is ready, then just finsih the task + // if repository is ready, then just finish 
the task if t.Repo.Status == models.RepositoryReady { return nil } @@ -106,7 +107,16 @@ func runMigrateTask(t *models.Task) (err error) { return } - repo, err = migrations.MigrateRepository(ctx, t.Doer, t.Owner.Name, *opts) + repo, err = migrations.MigrateRepository(ctx, t.Doer, t.Owner.Name, *opts, func(format string, args ...interface{}) { + message := models.TranslatableMessage{ + Format: format, + Args: args, + } + json := jsoniter.ConfigCompatibleWithStandardLibrary + bs, _ := json.Marshal(message) + t.Message = string(bs) + _ = t.UpdateCols("message") + }) if err == nil { log.Trace("Repository migrated [%d]: %s/%s", repo.ID, t.Owner.Name, repo.Name) return @@ -118,7 +128,7 @@ func runMigrateTask(t *models.Task) (err error) { } // remoteAddr may contain credentials, so we sanitize it - err = util.URLSanitizedError(err, opts.CloneAddr) + err = util.NewStringURLSanitizedError(err, opts.CloneAddr, true) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "could not read Username") { return fmt.Errorf("Authentication failed: %v", err.Error()) diff --git a/modules/task/task.go b/modules/task/task.go index 0443517c0169..1c0a87e1f61a 100644 --- a/modules/task/task.go +++ b/modules/task/task.go @@ -13,8 +13,11 @@ import ( "code.gitea.io/gitea/modules/migrations/base" "code.gitea.io/gitea/modules/queue" repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/secret" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" jsoniter "github.com/json-iterator/go" ) @@ -65,6 +68,24 @@ func MigrateRepository(doer, u *models.User, opts base.MigrateOptions) error { // CreateMigrateTask creates a migrate task func CreateMigrateTask(doer, u *models.User, opts base.MigrateOptions) (*models.Task, error) { + // encrypt credentials for persistence + var err error + opts.CloneAddrEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.CloneAddr) + if err != nil { + return nil, err + } + opts.CloneAddr = util.NewStringURLSanitizer(opts.CloneAddr, true).Replace(opts.CloneAddr) + opts.AuthPasswordEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.AuthPassword) + if err != nil { + return nil, err + } + opts.AuthPassword = "" + opts.AuthTokenEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.AuthToken) + if err != nil { + return nil, err + } + opts.AuthToken = "" + json := jsoniter.ConfigCompatibleWithStandardLibrary bs, err := json.Marshal(&opts) if err != nil { diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 8f1362afc411..f9b2dafd22a1 100644 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -27,6 +27,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/emoji" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/repository" @@ -35,7 +36,6 @@ import ( "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/gitdiff" - mirror_service "code.gitea.io/gitea/services/mirror" "github.com/editorconfig/editorconfig-core-go/v2" jsoniter "github.com/json-iterator/go" @@ -60,7 +60,7 @@ func NewFuncMap() []template.FuncMap { "AppSubUrl": func() string { return setting.AppSubURL }, - "StaticUrlPrefix": func() string { + "AssetUrlPrefix": func() string { return setting.StaticURLPrefix + "/assets" }, 
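CreateMigrateTask now encrypts the clone address, auth password and auth token before the options are marshalled and persisted, keeping only a sanitized clone URL in plain text. The diff relies on Gitea's own secret.EncryptSecret helper, whose internals are not shown here; as a rough standalone sketch of the same idea (not that implementation), AES-GCM with a random nonce looks like this, where the 32-byte key is a placeholder standing in for a configured secret key:

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/hex"
	"fmt"
	"io"
)

// encrypt seals plaintext with AES-GCM; the random nonce is prepended to the
// ciphertext so decrypt can recover it.
func encrypt(key, plaintext []byte) (string, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}
	return hex.EncodeToString(gcm.Seal(nonce, nonce, plaintext, nil)), nil
}

// decrypt reverses encrypt: split off the nonce, then open the ciphertext.
func decrypt(key []byte, encoded string) ([]byte, error) {
	raw, err := hex.DecodeString(encoded)
	if err != nil {
		return nil, err
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	if len(raw) < gcm.NonceSize() {
		return nil, fmt.Errorf("ciphertext too short")
	}
	nonce, ciphertext := raw[:gcm.NonceSize()], raw[gcm.NonceSize():]
	return gcm.Open(nil, nonce, ciphertext, nil)
}

func main() {
	key := []byte("0123456789abcdef0123456789abcdef") // placeholder 32-byte key
	enc, _ := encrypt(key, []byte("https://user:password@example.com/repo.git"))
	dec, _ := decrypt(key, enc)
	fmt.Println(string(dec))
}
```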
"AppUrl": func() string { @@ -90,6 +90,9 @@ func NewFuncMap() []template.FuncMap { "AllowedReactions": func() []string { return setting.UI.Reactions }, + "CustomEmojis": func() map[string]string { + return setting.UI.CustomEmojisMap + }, "Safe": Safe, "SafeJS": SafeJS, "JSEscape": JSEscape, @@ -294,11 +297,8 @@ func NewFuncMap() []template.FuncMap { } return float32(n) * 100 / float32(sum) }, - "CommentMustAsDiff": gitdiff.CommentMustAsDiff, - "MirrorAddress": mirror_service.Address, - "MirrorFullAddress": mirror_service.AddressNoCredentials, - "MirrorUserName": mirror_service.Username, - "MirrorPassword": mirror_service.Password, + "CommentMustAsDiff": gitdiff.CommentMustAsDiff, + "MirrorRemoteAddress": mirrorRemoteAddress, "CommitType": func(commit interface{}) string { switch commit.(type) { case models.SignCommitWithStatuses: @@ -766,7 +766,7 @@ func ReactionToEmoji(reaction string) template.HTML { if val != nil { return template.HTML(val.Emoji) } - return template.HTML(fmt.Sprintf(`:%s:`, reaction, setting.StaticURLPrefix, reaction)) + return template.HTML(fmt.Sprintf(`:%s:`, reaction, setting.StaticURLPrefix, reaction)) } // RenderNote renders the contents of a git-notes file as a commit message. @@ -963,3 +963,28 @@ func buildSubjectBodyTemplate(stpl *texttmpl.Template, btpl *template.Template, log.Warn("Failed to parse template [%s/body]: %v", name, err) } } + +type remoteAddress struct { + Address string + Username string + Password string +} + +func mirrorRemoteAddress(m models.RemoteMirrorer) remoteAddress { + a := remoteAddress{} + + u, err := git.GetRemoteAddress(m.GetRepository().RepoPath(), m.GetRemoteName()) + if err != nil { + log.Error("GetRemoteAddress %v", err) + return a + } + + if u.User != nil { + a.Username = u.User.Username() + a.Password, _ = u.User.Password() + } + u.User = nil + a.Address = u.String() + + return a +} diff --git a/modules/typesniffer/typesniffer.go b/modules/typesniffer/typesniffer.go new file mode 100644 index 000000000000..d257b8179b0a --- /dev/null +++ b/modules/typesniffer/typesniffer.go @@ -0,0 +1,96 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package typesniffer + +import ( + "fmt" + "io" + "net/http" + "regexp" + "strings" +) + +// Use at most this many bytes to determine Content Type. +const sniffLen = 1024 + +// SvgMimeType MIME type of SVG images. +const SvgMimeType = "image/svg+xml" + +var svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(||>))\s*)*\/]`) +var svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(||>))\s*)*\/]`) + +// SniffedType contains information about a blobs type. +type SniffedType struct { + contentType string +} + +// IsText etects if content format is plain text. 
+func (ct SniffedType) IsText() bool { + return strings.Contains(ct.contentType, "text/") +} + +// IsImage detects if data is an image format +func (ct SniffedType) IsImage() bool { + return strings.Contains(ct.contentType, "image/") +} + +// IsSvgImage detects if data is an SVG image format +func (ct SniffedType) IsSvgImage() bool { + return strings.Contains(ct.contentType, SvgMimeType) +} + +// IsPDF detects if data is a PDF format +func (ct SniffedType) IsPDF() bool { + return strings.Contains(ct.contentType, "application/pdf") +} + +// IsVideo detects if data is an video format +func (ct SniffedType) IsVideo() bool { + return strings.Contains(ct.contentType, "video/") +} + +// IsAudio detects if data is an video format +func (ct SniffedType) IsAudio() bool { + return strings.Contains(ct.contentType, "audio/") +} + +// IsRepresentableAsText returns true if file content can be represented as +// plain text or is empty. +func (ct SniffedType) IsRepresentableAsText() bool { + return ct.IsText() || ct.IsSvgImage() +} + +// DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty. +func DetectContentType(data []byte) SniffedType { + if len(data) == 0 { + return SniffedType{"text/unknown"} + } + + ct := http.DetectContentType(data) + + if len(data) > sniffLen { + data = data[:sniffLen] + } + + if (strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")) && svgTagRegex.Match(data) || + strings.Contains(ct, "text/xml") && svgTagInXMLRegex.Match(data) { + // SVG is unsupported. https://github.com/golang/go/issues/15888 + ct = SvgMimeType + } + + return SniffedType{ct} +} + +// DetectContentTypeFromReader guesses the content type contained in the reader. +func DetectContentTypeFromReader(r io.Reader) (SniffedType, error) { + buf := make([]byte, sniffLen) + n, err := r.Read(buf) + if err != nil && err != io.EOF { + return SniffedType{}, fmt.Errorf("DetectContentTypeFromReader io error: %w", err) + } + buf = buf[:n] + + return DetectContentType(buf), nil +} diff --git a/modules/typesniffer/typesniffer_test.go b/modules/typesniffer/typesniffer_test.go new file mode 100644 index 000000000000..a3b47c459863 --- /dev/null +++ b/modules/typesniffer/typesniffer_test.go @@ -0,0 +1,97 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package typesniffer + +import ( + "bytes" + "encoding/base64" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDetectContentTypeLongerThanSniffLen(t *testing.T) { + // Pre-condition: Shorter than sniffLen detects SVG. + assert.Equal(t, "image/svg+xml", DetectContentType([]byte(``)).contentType) + // Longer than sniffLen detects something else. 
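DetectContentTypeFromReader only needs the first sniffLen (1024) bytes, so it reads once into a fixed buffer and defers to http.DetectContentType. A simplified stand-in without the SVG special-casing, just to show the sniffing step on its own:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

const sniffLen = 1024 // same budget the diff uses

// sniff reads at most sniffLen bytes and lets net/http guess the content type.
func sniff(r io.Reader) (string, error) {
	buf := make([]byte, sniffLen)
	n, err := r.Read(buf)
	if err != nil && err != io.EOF {
		return "", err
	}
	return http.DetectContentType(buf[:n]), nil
}

func main() {
	ct, err := sniff(strings.NewReader("%PDF-1.6 ..."))
	if err != nil {
		panic(err)
	}
	fmt.Println(ct) // application/pdf
}
```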
+ assert.NotEqual(t, "image/svg+xml", DetectContentType([]byte(``)).contentType) +} + +func TestIsTextFile(t *testing.T) { + assert.True(t, DetectContentType([]byte{}).IsText()) + assert.True(t, DetectContentType([]byte("lorem ipsum")).IsText()) +} + +func TestIsSvgImage(t *testing.T) { + assert.True(t, DetectContentType([]byte("")).IsSvgImage()) + assert.True(t, DetectContentType([]byte(" ")).IsSvgImage()) + assert.True(t, DetectContentType([]byte(``)).IsSvgImage()) + assert.True(t, DetectContentType([]byte("")).IsSvgImage()) + assert.True(t, DetectContentType([]byte(``)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + + + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + + `)).IsSvgImage()) + assert.True(t, DetectContentType([]byte(` + + + `)).IsSvgImage()) + assert.False(t, DetectContentType([]byte{}).IsSvgImage()) + assert.False(t, DetectContentType([]byte("svg")).IsSvgImage()) + assert.False(t, DetectContentType([]byte("")).IsSvgImage()) + assert.False(t, DetectContentType([]byte("text")).IsSvgImage()) + assert.False(t, DetectContentType([]byte("")).IsSvgImage()) + assert.False(t, DetectContentType([]byte(``)).IsSvgImage()) + assert.False(t, DetectContentType([]byte(` + `)).IsSvgImage()) + assert.False(t, DetectContentType([]byte(` + + `)).IsSvgImage()) +} + +func TestIsPDF(t *testing.T) { + pdf, _ := base64.StdEncoding.DecodeString("JVBERi0xLjYKJcOkw7zDtsOfCjIgMCBvYmoKPDwvTGVuZ3RoIDMgMCBSL0ZpbHRlci9GbGF0ZURlY29kZT4+CnN0cmVhbQp4nF3NPwsCMQwF8D2f4s2CNYk1baF0EHRwOwg4iJt/NsFb/PpevUE4Mjwe") + assert.True(t, DetectContentType(pdf).IsPDF()) + assert.False(t, DetectContentType([]byte("plain text")).IsPDF()) +} + +func TestIsVideo(t *testing.T) { + mp4, _ := base64.StdEncoding.DecodeString("AAAAGGZ0eXBtcDQyAAAAAGlzb21tcDQyAAEI721vb3YAAABsbXZoZAAAAADaBlwX2gZcFwAAA+gA") + assert.True(t, DetectContentType(mp4).IsVideo()) + assert.False(t, DetectContentType([]byte("plain text")).IsVideo()) +} + +func TestIsAudio(t *testing.T) { + mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl") + assert.True(t, DetectContentType(mp3).IsAudio()) + assert.False(t, DetectContentType([]byte("plain text")).IsAudio()) +} + +func TestDetectContentTypeFromReader(t *testing.T) { + mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl") + st, err := DetectContentTypeFromReader(bytes.NewReader(mp3)) + assert.NoError(t, err) + assert.True(t, st.IsAudio()) +} diff --git a/modules/util/path.go b/modules/util/path.go index 2ac8f4d80a3e..e79747327d75 100644 --- a/modules/util/path.go +++ b/modules/util/path.go @@ -154,7 +154,7 @@ func StatDir(rootPath string, includeDir ...bool) ([]string, error) { return statDir(rootPath, "", isIncludeDir, false, false) } -// FileURLToPath extracts the path informations from a file://... url. +// FileURLToPath extracts the path information from a file://... url. 
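FileURLToPath, whose doc comment is corrected just above, turns a file:// URL into a local filesystem path and rejects any other scheme. A rough usage sketch of that contract; the POSIX-only path handling is an assumption of this sketch, not a description of the real helper:

```go
package main

import (
	"errors"
	"fmt"
	"net/url"
)

// fileURLToPath is a minimal stand-in: accept only file:// URLs and return
// the path portion (POSIX-style paths only in this sketch).
func fileURLToPath(u *url.URL) (string, error) {
	if u.Scheme != "file" {
		return "", errors.New("URL scheme is not 'file': " + u.String())
	}
	return u.Path, nil
}

func main() {
	u, err := url.Parse("file:///var/lib/gitea/repositories/org/repo.git")
	if err != nil {
		panic(err)
	}
	p, err := fileURLToPath(u)
	if err != nil {
		panic(err)
	}
	fmt.Println(p) // /var/lib/gitea/repositories/org/repo.git
}
```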
func FileURLToPath(u *url.URL) (string, error) { if u.Scheme != "file" { return "", errors.New("URL scheme is not 'file': " + u.String()) diff --git a/modules/util/remove.go b/modules/util/remove.go index f2bbbc30b928..23104365256f 100644 --- a/modules/util/remove.go +++ b/modules/util/remove.go @@ -33,7 +33,7 @@ func Remove(name string) error { return err } -// RemoveAll removes the named file or (empty) directory with at most 5 attempts.Remove +// RemoveAll removes the named file or (empty) directory with at most 5 attempts. func RemoveAll(name string) error { var err error for i := 0; i < 5; i++ { @@ -55,3 +55,30 @@ func RemoveAll(name string) error { } return err } + +// Rename renames (moves) oldpath to newpath with at most 5 attempts. +func Rename(oldpath, newpath string) error { + var err error + for i := 0; i < 5; i++ { + err = os.Rename(oldpath, newpath) + if err == nil { + break + } + unwrapped := err.(*os.PathError).Err + if unwrapped == syscall.EBUSY || unwrapped == syscall.ENOTEMPTY || unwrapped == syscall.EPERM || unwrapped == syscall.EMFILE || unwrapped == syscall.ENFILE { + // try again + <-time.After(100 * time.Millisecond) + continue + } + + if i == 0 && os.IsNotExist(err) { + return err + } + + if unwrapped == syscall.ENOENT { + // it's already gone + return nil + } + } + return err +} diff --git a/modules/util/sanitize.go b/modules/util/sanitize.go index a4f5479dfb74..de59ffaa2e5d 100644 --- a/modules/util/sanitize.go +++ b/modules/util/sanitize.go @@ -1,4 +1,4 @@ -// Copyright 2017 The Gitea Authors. All rights reserved. +// Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. @@ -9,40 +9,53 @@ import ( "strings" ) -// urlSafeError wraps an error whose message may contain a sensitive URL -type urlSafeError struct { - err error - unsanitizedURL string +const userPlaceholder = "sanitized-credential" +const unparsableURL = "(unparsable url)" + +type sanitizedError struct { + err error + replacer *strings.Replacer } -func (err urlSafeError) Error() string { - return SanitizeMessage(err.err.Error(), err.unsanitizedURL) +func (err sanitizedError) Error() string { + return err.replacer.Replace(err.err.Error()) } -// URLSanitizedError returns the sanitized version an error whose message may -// contain a sensitive URL -func URLSanitizedError(err error, unsanitizedURL string) error { - return urlSafeError{err: err, unsanitizedURL: unsanitizedURL} +// NewSanitizedError wraps an error and replaces all old, new string pairs in the message text. +func NewSanitizedError(err error, oldnew ...string) error { + return sanitizedError{err: err, replacer: strings.NewReplacer(oldnew...)} } -// SanitizeMessage sanitizes a message which may contains a sensitive URL -func SanitizeMessage(message, unsanitizedURL string) string { - sanitizedURL := SanitizeURLCredentials(unsanitizedURL, true) - return strings.ReplaceAll(message, unsanitizedURL, sanitizedURL) +// NewURLSanitizedError wraps an error and replaces the url credential or removes them. +func NewURLSanitizedError(err error, u *url.URL, usePlaceholder bool) error { + return sanitizedError{err: err, replacer: NewURLSanitizer(u, usePlaceholder)} } -// SanitizeURLCredentials sanitizes a url, either removing user credentials -// or replacing them with a placeholder. 
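The rewritten sanitizer builds a strings.Replacer up front: parse the URL, swap its user info for a placeholder (or drop it entirely), and register the old/new pair so the replacement can be applied to any error message that may embed the URL. A condensed sketch of that flow; the placeholder text matches the diff's sanitized-credential constant:

```go
package main

import (
	"errors"
	"fmt"
	"net/url"
	"strings"
)

const userPlaceholder = "sanitized-credential"

// urlSanitizer returns a replacer that rewrites the credentialed form of the
// URL into a safe one; unparsable input is replaced wholesale.
func urlSanitizer(unsanitized string, usePlaceholder bool) *strings.Replacer {
	u, err := url.Parse(unsanitized)
	if err != nil {
		// Don't log the error, it might contain the unsanitized URL.
		return strings.NewReplacer(unsanitized, "(unparsable url)")
	}
	old := u.String()
	if u.User != nil && usePlaceholder {
		u.User = url.User(userPlaceholder)
	} else {
		u.User = nil
	}
	return strings.NewReplacer(old, u.String())
}

func main() {
	cloneAddr := "https://mytoken@github.com/go-gitea/test_repo.git"
	err := errors.New("fetch failed for " + cloneAddr)

	r := urlSanitizer(cloneAddr, true)
	fmt.Println(r.Replace(err.Error()))
	// fetch failed for https://sanitized-credential@github.com/go-gitea/test_repo.git
}
```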
-func SanitizeURLCredentials(unsanitizedURL string, usePlaceholder bool) string { - u, err := url.Parse(unsanitizedURL) - if err != nil { - // don't log the error, since it might contain unsanitized URL. - return "(unparsable url)" - } +// NewStringURLSanitizedError wraps an error and replaces the url credential or removes them. +// If the url can't get parsed it gets replaced with a placeholder string. +func NewStringURLSanitizedError(err error, unsanitizedURL string, usePlaceholder bool) error { + return sanitizedError{err: err, replacer: NewStringURLSanitizer(unsanitizedURL, usePlaceholder)} +} + +// NewURLSanitizer creates a replacer for the url with the credential sanitized or removed. +func NewURLSanitizer(u *url.URL, usePlaceholder bool) *strings.Replacer { + old := u.String() + if u.User != nil && usePlaceholder { - u.User = url.User("") + u.User = url.User(userPlaceholder) } else { u.User = nil } - return u.String() + return strings.NewReplacer(old, u.String()) +} + +// NewStringURLSanitizer creates a replacer for the url with the credential sanitized or removed. +// If the url can't get parsed it gets replaced with a placeholder string +func NewStringURLSanitizer(unsanitizedURL string, usePlaceholder bool) *strings.Replacer { + u, err := url.Parse(unsanitizedURL) + if err != nil { + // don't log the error, since it might contain unsanitized URL. + return strings.NewReplacer(unsanitizedURL, unparsableURL) + } + return NewURLSanitizer(u, usePlaceholder) } diff --git a/modules/util/sanitize_test.go b/modules/util/sanitize_test.go index 4f07100675b0..578f75f5188f 100644 --- a/modules/util/sanitize_test.go +++ b/modules/util/sanitize_test.go @@ -1,25 +1,164 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. +// Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. package util import ( + "errors" "testing" "github.com/stretchr/testify/assert" ) -func TestSanitizeURLCredentials(t *testing.T) { - var kases = map[string]string{ - "https://github.com/go-gitea/test_repo.git": "https://github.com/go-gitea/test_repo.git", - "https://mytoken@github.com/go-gitea/test_repo.git": "https://github.com/go-gitea/test_repo.git", - "http://github.com/go-gitea/test_repo.git": "http://github.com/go-gitea/test_repo.git", - "/test/repos/repo1": "/test/repos/repo1", - "git@github.com:go-gitea/test_repo.git": "(unparsable url)", +func TestNewSanitizedError(t *testing.T) { + err := errors.New("error while secret on test") + err2 := NewSanitizedError(err) + assert.Equal(t, err.Error(), err2.Error()) + + var cases = []struct { + input error + oldnew []string + expected string + }{ + // case 0 + { + errors.New("error while secret on test"), + []string{"secret", "replaced"}, + "error while replaced on test", + }, + // case 1 + { + errors.New("error while sec-ret on test"), + []string{"secret", "replaced"}, + "error while sec-ret on test", + }, } - for source, value := range kases { - assert.EqualValues(t, value, SanitizeURLCredentials(source, false)) + for n, c := range cases { + err := NewSanitizedError(c.input, c.oldnew...) 
+ + assert.Equal(t, c.expected, err.Error(), "case %d: error should match", n) + } +} + +func TestNewStringURLSanitizer(t *testing.T) { + var cases = []struct { + input string + placeholder bool + expected string + }{ + // case 0 + { + "https://github.com/go-gitea/test_repo.git", + true, + "https://github.com/go-gitea/test_repo.git", + }, + // case 1 + { + "https://github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 2 + { + "https://mytoken@github.com/go-gitea/test_repo.git", + true, + "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + // case 3 + { + "https://mytoken@github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 4 + { + "https://user:password@github.com/go-gitea/test_repo.git", + true, + "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + // case 5 + { + "https://user:password@github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 6 + { + "https://gi\nthub.com/go-gitea/test_repo.git", + false, + unparsableURL, + }, + } + + for n, c := range cases { + // uses NewURLSanitizer internally + result := NewStringURLSanitizer(c.input, c.placeholder).Replace(c.input) + + assert.Equal(t, c.expected, result, "case %d: error should match", n) + } +} + +func TestNewStringURLSanitizedError(t *testing.T) { + var cases = []struct { + input string + placeholder bool + expected string + }{ + // case 0 + { + "https://github.com/go-gitea/test_repo.git", + true, + "https://github.com/go-gitea/test_repo.git", + }, + // case 1 + { + "https://github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 2 + { + "https://mytoken@github.com/go-gitea/test_repo.git", + true, + "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + // case 3 + { + "https://mytoken@github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 4 + { + "https://user:password@github.com/go-gitea/test_repo.git", + true, + "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + // case 5 + { + "https://user:password@github.com/go-gitea/test_repo.git", + false, + "https://github.com/go-gitea/test_repo.git", + }, + // case 6 + { + "https://gi\nthub.com/go-gitea/test_repo.git", + false, + unparsableURL, + }, + } + + encloseText := func(input string) string { + return "test " + input + " test" + } + + for n, c := range cases { + err := errors.New(encloseText(c.input)) + + result := NewStringURLSanitizedError(err, c.input, c.placeholder) + + assert.Equal(t, encloseText(c.expected), result.Error(), "case %d: error should match", n) } } diff --git a/modules/util/shellquote_test.go b/modules/util/shellquote_test.go index 2ddc6d763d86..be6ba372fdfb 100644 --- a/modules/util/shellquote_test.go +++ b/modules/util/shellquote_test.go @@ -33,7 +33,7 @@ func TestShellEscape(t *testing.T) { "~git/Gitea v1.13/gitea", `~git/"Gitea v1.13/gitea"`, }, { - "Bangs are unforutunately not predictable so need to be singlequoted", + "Bangs are unfortunately not predictable so need to be singlequoted", "C:/Program Files/Gitea!/gitea", `'C:/Program Files/Gitea!/gitea'`, }, { @@ -41,7 +41,7 @@ func TestShellEscape(t *testing.T) { "/home/git/Gitea\n\nWHY-WOULD-YOU-DO-THIS\n\nGitea/gitea", "'/home/git/Gitea\n\nWHY-WOULD-YOU-DO-THIS\n\nGitea/gitea'", }, { - "Similarly we should nicely handle mutiple single quotes if we have to 
single-quote", + "Similarly we should nicely handle multiple single quotes if we have to single-quote", "'!''!'''!''!'!'", `\''!'\'\''!'\'\'\''!'\'\''!'\''!'\'`, }, { diff --git a/modules/util/util.go b/modules/util/util.go index 9de1710ac7ba..d26e6f13e4b6 100644 --- a/modules/util/util.go +++ b/modules/util/util.go @@ -6,7 +6,9 @@ package util import ( "bytes" + "crypto/rand" "errors" + "math/big" "strings" ) @@ -124,3 +126,28 @@ func MergeInto(dict map[string]interface{}, values ...interface{}) (map[string]i return dict, nil } + +// RandomInt returns a random integer between 0 and limit, inclusive +func RandomInt(limit int64) (int64, error) { + int, err := rand.Int(rand.Reader, big.NewInt(limit)) + if err != nil { + return 0, err + } + return int.Int64(), nil +} + +const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + +// RandomString generates a random alphanumerical string +func RandomString(length int64) (string, error) { + bytes := make([]byte, length) + limit := int64(len(letters)) + for i := range bytes { + num, err := RandomInt(limit) + if err != nil { + return "", err + } + bytes[i] = letters[num] + } + return string(bytes), nil +} diff --git a/modules/util/util_test.go b/modules/util/util_test.go index 1d4f23de9043..f82671787cfb 100644 --- a/modules/util/util_test.go +++ b/modules/util/util_test.go @@ -5,6 +5,7 @@ package util import ( + "regexp" "strings" "testing" @@ -118,3 +119,40 @@ func Test_NormalizeEOL(t *testing.T) { assert.Equal(t, []byte("mix\nand\nmatch\n."), NormalizeEOL([]byte("mix\r\nand\rmatch\n."))) } + +func Test_RandomInt(t *testing.T) { + int, err := RandomInt(255) + assert.True(t, int >= 0) + assert.True(t, int <= 255) + assert.NoError(t, err) +} + +func Test_RandomString(t *testing.T) { + str1, err := RandomString(32) + assert.NoError(t, err) + matches, err := regexp.MatchString(`^[a-zA-Z0-9]{32}$`, str1) + assert.NoError(t, err) + assert.True(t, matches) + + str2, err := RandomString(32) + assert.NoError(t, err) + matches, err = regexp.MatchString(`^[a-zA-Z0-9]{32}$`, str1) + assert.NoError(t, err) + assert.True(t, matches) + + assert.NotEqual(t, str1, str2) + + str3, err := RandomString(256) + assert.NoError(t, err) + matches, err = regexp.MatchString(`^[a-zA-Z0-9]{256}$`, str3) + assert.NoError(t, err) + assert.True(t, matches) + + str4, err := RandomString(256) + assert.NoError(t, err) + matches, err = regexp.MatchString(`^[a-zA-Z0-9]{256}$`, str4) + assert.NoError(t, err) + assert.True(t, matches) + + assert.NotEqual(t, str3, str4) +} diff --git a/modules/validation/binding.go b/modules/validation/binding.go index 5cfd994d2dae..5d5c64611f29 100644 --- a/modules/validation/binding.go +++ b/modules/validation/binding.go @@ -19,6 +19,9 @@ const ( // ErrGlobPattern is returned when glob pattern is invalid ErrGlobPattern = "GlobPattern" + + // ErrRegexPattern is returned when a regex pattern is invalid + ErrRegexPattern = "RegexPattern" ) var ( @@ -52,7 +55,10 @@ func CheckGitRefAdditionalRulesValid(name string) bool { func AddBindingRules() { addGitRefNameBindingRule() addValidURLBindingRule() + addValidSiteURLBindingRule() addGlobPatternRule() + addRegexPatternRule() + addGlobOrRegexPatternRule() } func addGitRefNameBindingRule() { @@ -97,19 +103,17 @@ func addValidURLBindingRule() { }) } -func addGlobPatternRule() { +func addValidSiteURLBindingRule() { + // URL validation rule binding.AddRule(&binding.Rule{ IsMatch: func(rule string) bool { - return rule == "GlobPattern" + return strings.HasPrefix(rule, "ValidSiteUrl") }, 
IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) { str := fmt.Sprintf("%v", val) - - if len(str) != 0 { - if _, err := glob.Compile(str); err != nil { - errs.Add([]string{name}, ErrGlobPattern, err.Error()) - return false, errs - } + if len(str) != 0 && !IsValidSiteURL(str) { + errs.Add([]string{name}, binding.ERR_URL, "Url") + return false, errs } return true, errs @@ -117,6 +121,64 @@ func addGlobPatternRule() { }) } +func addGlobPatternRule() { + binding.AddRule(&binding.Rule{ + IsMatch: func(rule string) bool { + return rule == "GlobPattern" + }, + IsValid: globPatternValidator, + }) +} + +func globPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) { + str := fmt.Sprintf("%v", val) + + if len(str) != 0 { + if _, err := glob.Compile(str); err != nil { + errs.Add([]string{name}, ErrGlobPattern, err.Error()) + return false, errs + } + } + + return true, errs +} + +func addRegexPatternRule() { + binding.AddRule(&binding.Rule{ + IsMatch: func(rule string) bool { + return rule == "RegexPattern" + }, + IsValid: regexPatternValidator, + }) +} + +func regexPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) { + str := fmt.Sprintf("%v", val) + + if _, err := regexp.Compile(str); err != nil { + errs.Add([]string{name}, ErrRegexPattern, err.Error()) + return false, errs + } + + return true, errs +} + +func addGlobOrRegexPatternRule() { + binding.AddRule(&binding.Rule{ + IsMatch: func(rule string) bool { + return rule == "GlobOrRegexPattern" + }, + IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) { + str := strings.TrimSpace(fmt.Sprintf("%v", val)) + + if len(str) >= 2 && strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") { + return regexPatternValidator(errs, name, str[1:len(str)-1]) + } + return globPatternValidator(errs, name, val) + }, + }) +} + func portOnly(hostport string) string { colon := strings.IndexByte(hostport, ':') if colon == -1 { diff --git a/modules/validation/binding_test.go b/modules/validation/binding_test.go index e0daba89e502..d3b4e686ae29 100644 --- a/modules/validation/binding_test.go +++ b/modules/validation/binding_test.go @@ -26,9 +26,10 @@ type ( } TestForm struct { - BranchName string `form:"BranchName" binding:"GitRefName"` - URL string `form:"ValidUrl" binding:"ValidUrl"` - GlobPattern string `form:"GlobPattern" binding:"GlobPattern"` + BranchName string `form:"BranchName" binding:"GitRefName"` + URL string `form:"ValidUrl" binding:"ValidUrl"` + GlobPattern string `form:"GlobPattern" binding:"GlobPattern"` + RegexPattern string `form:"RegexPattern" binding:"RegexPattern"` } ) diff --git a/modules/validation/helpers.go b/modules/validation/helpers.go index c22e667a2ebf..617ec3578c8f 100644 --- a/modules/validation/helpers.go +++ b/modules/validation/helpers.go @@ -52,6 +52,25 @@ func IsValidURL(uri string) bool { return true } +// IsValidSiteURL checks if URL is valid +func IsValidSiteURL(uri string) bool { + u, err := url.ParseRequestURI(uri) + if err != nil { + return false + } + + if !validPort(portOnly(u.Host)) { + return false + } + + for _, scheme := range setting.Service.ValidSiteURLSchemes { + if scheme == u.Scheme { + return true + } + } + return false +} + // IsAPIURL checks if URL is current Gitea instance API URL func IsAPIURL(uri string) bool { return strings.HasPrefix(strings.ToLower(uri), strings.ToLower(setting.AppURL+"api")) @@ -73,7 +92,7 @@ func IsValidExternalURL(uri string) bool { 
return false } - // TODO: Later it should be added to allow local network IP addreses + // TODO: Later it should be added to allow local network IP addresses // only if allowed by special setting return true diff --git a/modules/validation/helpers_test.go b/modules/validation/helpers_test.go index cc2a4b720d7a..f6f897e8210d 100644 --- a/modules/validation/helpers_test.go +++ b/modules/validation/helpers_test.go @@ -24,12 +24,12 @@ func Test_IsValidURL(t *testing.T) { valid: false, }, { - description: "Loobpack IPv4 URL", + description: "Loopback IPv4 URL", url: "http://127.0.1.1:5678/", valid: true, }, { - description: "Loobpack IPv6 URL", + description: "Loopback IPv6 URL", url: "https://[::1]/", valid: true, }, @@ -61,7 +61,7 @@ func Test_IsValidExternalURL(t *testing.T) { valid: true, }, { - description: "Loobpack IPv4 URL", + description: "Loopback IPv4 URL", url: "http://127.0.1.1:5678/", valid: false, }, diff --git a/modules/validation/refname_test.go b/modules/validation/refname_test.go index 974d9565632d..2d6458b9b5a5 100644 --- a/modules/validation/refname_test.go +++ b/modules/validation/refname_test.go @@ -12,7 +12,7 @@ import ( var gitRefNameValidationTestCases = []validationTestCase{ { - description: "Referece contains only characters", + description: "Reference name contains only characters", data: TestForm{ BranchName: "test", }, diff --git a/modules/validation/regex_pattern_test.go b/modules/validation/regex_pattern_test.go new file mode 100644 index 000000000000..afe1bcf425df --- /dev/null +++ b/modules/validation/regex_pattern_test.go @@ -0,0 +1,60 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package validation + +import ( + "regexp" + "testing" + + "gitea.com/go-chi/binding" +) + +func getRegexPatternErrorString(pattern string) string { + if _, err := regexp.Compile(pattern); err != nil { + return err.Error() + } + return "" +} + +var regexValidationTestCases = []validationTestCase{ + { + description: "Empty regex pattern", + data: TestForm{ + RegexPattern: "", + }, + expectedErrors: binding.Errors{}, + }, + { + description: "Valid regex", + data: TestForm{ + RegexPattern: `(\d{1,3})+`, + }, + expectedErrors: binding.Errors{}, + }, + + { + description: "Invalid regex", + data: TestForm{ + RegexPattern: "[a-", + }, + expectedErrors: binding.Errors{ + binding.Error{ + FieldNames: []string{"RegexPattern"}, + Classification: ErrRegexPattern, + Message: getRegexPatternErrorString("[a-"), + }, + }, + }, +} + +func Test_RegexPatternValidation(t *testing.T) { + AddBindingRules() + + for _, testCase := range regexValidationTestCases { + t.Run(testCase.description, func(t *testing.T) { + performValidationTest(t, testCase) + }) + } +} diff --git a/modules/web/middleware/binding.go b/modules/web/middleware/binding.go index cd418c9792b7..cbdb29b81294 100644 --- a/modules/web/middleware/binding.go +++ b/modules/web/middleware/binding.go @@ -135,6 +135,8 @@ func Validate(errs binding.Errors, data map[string]interface{}, f Form, l transl data["ErrorMsg"] = trName + l.Tr("form.include_error", GetInclude(field)) case validation.ErrGlobPattern: data["ErrorMsg"] = trName + l.Tr("form.glob_pattern_error", errs[0].Message) + case validation.ErrRegexPattern: + data["ErrorMsg"] = trName + l.Tr("form.regex_pattern_error", errs[0].Message) default: data["ErrorMsg"] = l.Tr("form.unknown_error") + " " + errs[0].Classification } diff --git a/modules/web/middleware/cookie.go 
b/modules/web/middleware/cookie.go index cfcc2bbac787..f44d2c3688c5 100644 --- a/modules/web/middleware/cookie.go +++ b/modules/web/middleware/cookie.go @@ -149,7 +149,7 @@ func SetCookie(resp http.ResponseWriter, name string, value string, others ...in if len(others) > 2 { if v, ok := others[2].(string); ok && len(v) > 0 { cookie.Domain = v - } else if v, ok := others[1].(func(*http.Cookie)); ok { + } else if v, ok := others[2].(func(*http.Cookie)); ok { v(&cookie) } } @@ -170,7 +170,7 @@ func SetCookie(resp http.ResponseWriter, name string, value string, others ...in if len(others) > 4 { if v, ok := others[4].(bool); ok && v { cookie.HttpOnly = true - } else if v, ok := others[1].(func(*http.Cookie)); ok { + } else if v, ok := others[4].(func(*http.Cookie)); ok { v(&cookie) } } @@ -179,7 +179,7 @@ func SetCookie(resp http.ResponseWriter, name string, value string, others ...in if v, ok := others[5].(time.Time); ok { cookie.Expires = v cookie.RawExpires = v.Format(time.UnixDate) - } else if v, ok := others[1].(func(*http.Cookie)); ok { + } else if v, ok := others[5].(func(*http.Cookie)); ok { v(&cookie) } } diff --git a/modules/web/route.go b/modules/web/route.go index 6f9e76bdf389..319d08f5981c 100644 --- a/modules/web/route.go +++ b/modules/web/route.go @@ -5,6 +5,7 @@ package web import ( + goctx "context" "fmt" "net/http" "reflect" @@ -27,6 +28,7 @@ func Wrap(handlers ...interface{}) http.HandlerFunc { switch t := handler.(type) { case http.HandlerFunc, func(http.ResponseWriter, *http.Request), func(ctx *context.Context), + func(ctx *context.Context) goctx.CancelFunc, func(*context.APIContext), func(*context.PrivateContext), func(http.Handler) http.Handler: @@ -48,6 +50,15 @@ func Wrap(handlers ...interface{}) http.HandlerFunc { if r, ok := resp.(context.ResponseWriter); ok && r.Status() > 0 { return } + case func(ctx *context.Context) goctx.CancelFunc: + ctx := context.GetContext(req) + cancel := t(ctx) + if cancel != nil { + defer cancel() + } + if ctx.Written() { + return + } case func(ctx *context.Context): ctx := context.GetContext(req) t(ctx) @@ -94,6 +105,23 @@ func Middle(f func(ctx *context.Context)) func(netx http.Handler) http.Handler { } } +// MiddleCancel wrap a context function as a chi middleware +func MiddleCancel(f func(ctx *context.Context) goctx.CancelFunc) func(netx http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + ctx := context.GetContext(req) + cancel := f(ctx) + if cancel != nil { + defer cancel() + } + if ctx.Written() { + return + } + next.ServeHTTP(ctx.Resp, ctx.Req) + }) + } +} + // MiddleAPI wrap a context function as a chi middleware func MiddleAPI(f func(ctx *context.APIContext)) func(netx http.Handler) http.Handler { return func(next http.Handler) http.Handler { @@ -163,6 +191,8 @@ func (r *Route) Use(middlewares ...interface{}) { r.R.Use(t) case func(*context.Context): r.R.Use(Middle(t)) + case func(*context.Context) goctx.CancelFunc: + r.R.Use(MiddleCancel(t)) case func(*context.APIContext): r.R.Use(MiddleAPI(t)) default: diff --git a/options/gitignore/AltiumDesigner b/options/gitignore/AltiumDesigner new file mode 100644 index 000000000000..5e410492cb66 --- /dev/null +++ b/options/gitignore/AltiumDesigner @@ -0,0 +1,20 @@ +# For PCBs designed using Altium Designer +# Website: https://www.altium.com/altium-designer/ + +# Directories containing cache data +History +__Previews + +# Directories containing logs and generated outputs +Project\ Logs* 
+Project\ Outputs* + +# Misc files generated by altium +debug.log +Status\ Report.txt +*.PcbDoc.htm +*.SchDocPreview +*.PcbDocPreview + +# Lock files sometimes left behind +.~lock.* diff --git a/options/gitignore/Android b/options/gitignore/Android index 23de6e20a61f..5d18272eb42e 100644 --- a/options/gitignore/Android +++ b/options/gitignore/Android @@ -44,6 +44,7 @@ captures/ .idea/assetWizardSettings.xml .idea/dictionaries .idea/libraries +.idea/jarRepositories.xml # Android Studio 3 in .gitignore file. .idea/caches .idea/modules.xml diff --git a/options/gitignore/Autotools b/options/gitignore/Autotools index f2c137d046a6..617156f8192d 100644 --- a/options/gitignore/Autotools +++ b/options/gitignore/Autotools @@ -16,6 +16,7 @@ autom4te.cache /autoscan-*.log /aclocal.m4 /compile +/config.cache /config.guess /config.h.in /config.log @@ -44,8 +45,8 @@ m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4 -# Generated Makefile -# (meta build system like autotools, +# Generated Makefile +# (meta build system like autotools, # can automatically generate from config.status script # (which is called by configure script)) Makefile diff --git a/options/gitignore/Coq b/options/gitignore/Coq index 829ac44a1c7b..66596b22ed3a 100644 --- a/options/gitignore/Coq +++ b/options/gitignore/Coq @@ -10,6 +10,7 @@ *.glob *.ml.d *.ml4.d +*.mlg.d *.mli.d *.mllib.d *.mlpack.d @@ -20,7 +21,7 @@ *.vo *.vok *.vos -.coq-native/ +.coq-native .csdp.cache .lia.cache .nia.cache @@ -31,6 +32,7 @@ lia.cache nia.cache nlia.cache nra.cache +native_compute_profile_*.data # generated timing files *.timing.diff diff --git a/options/gitignore/Dart b/options/gitignore/Dart index 6d21af37c97b..3a83c2f087b9 100644 --- a/options/gitignore/Dart +++ b/options/gitignore/Dart @@ -11,6 +11,9 @@ pubspec.lock # If you don't generate documentation locally you can remove this line. doc/api/ +# dotenv environment variables file +.env* + # Avoid committing generated Javascript files: *.dart.js *.info.json # Produced by the --dump-info flag. 
diff --git a/options/gitignore/Gradle b/options/gitignore/Gradle index 8d68edc977c1..85888bb2e144 100644 --- a/options/gitignore/Gradle +++ b/options/gitignore/Gradle @@ -10,6 +10,3 @@ gradle-app.setting # Cache of project .gradletasknamecache - -# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 -# gradle/wrapper/gradle-wrapper.properties diff --git a/options/gitignore/Gretl b/options/gitignore/Gretl new file mode 100644 index 000000000000..582489e54222 --- /dev/null +++ b/options/gitignore/Gretl @@ -0,0 +1,8 @@ +# gitignore template for Gretl +# website: http://gretl.sourceforge.net/ + +# Auto-generated log file is overwritten whenever you start a new session +session.inp + +# Auto-generated temporary string code table +string_table.txt diff --git a/options/gitignore/IAR_EWARM b/options/gitignore/IAR_EWARM index 13ed9a0b1922..e456471f66ac 100644 --- a/options/gitignore/IAR_EWARM +++ b/options/gitignore/IAR_EWARM @@ -1,5 +1,5 @@ # gitignore template for the IAR EWARM -# website: https://www.iar.com/ +# website: https://www.iar.com/knowledge/support/technical-notes/ide/which-files-should-be-version-controlled/ # Some tools will put the EWARM files # under a subdirectory with the same name diff --git a/options/gitignore/JetBrains b/options/gitignore/JetBrains index 8da0824ba549..0a16fa718cd2 100644 --- a/options/gitignore/JetBrains +++ b/options/gitignore/JetBrains @@ -8,6 +8,9 @@ .idea/**/dictionaries .idea/**/shelf +# AWS User-specific +.idea/**/aws.xml + # Generated files .idea/**/contentModel.xml diff --git a/options/gitignore/Node b/options/gitignore/Node index 1f22b9c26a3d..0125458e15f0 100644 --- a/options/gitignore/Node +++ b/options/gitignore/Node @@ -5,6 +5,7 @@ npm-debug.log* yarn-debug.log* yarn-error.log* lerna-debug.log* +.pnpm-debug.log* # Diagnostic reports (https://nodejs.org/api/report.html) report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json @@ -71,6 +72,7 @@ web_modules/ # dotenv environment variables file .env .env.test +.env.production # parcel-bundler cache (https://parceljs.org/) .cache diff --git a/options/gitignore/ROS2 b/options/gitignore/ROS2 new file mode 100644 index 000000000000..6cc824d8e6d9 --- /dev/null +++ b/options/gitignore/ROS2 @@ -0,0 +1,29 @@ +install/ +log/ +build/ + +# Ignore generated docs +*.dox +*.wikidoc + +# eclipse stuff +.project +.cproject + +# qcreator stuff +CMakeLists.txt.user + +srv/_*.py +*.pcd +*.pyc +qtcreator-* +*.user + +*~ + +# Emacs +.#* + +# Colcon custom files +COLCON_IGNORE +AMENT_IGNORE diff --git a/options/gitignore/Rust b/options/gitignore/Rust index ff47c2d77d91..6985cf1bd09d 100644 --- a/options/gitignore/Rust +++ b/options/gitignore/Rust @@ -9,3 +9,6 @@ Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb diff --git a/options/gitignore/SPFx b/options/gitignore/SPFx new file mode 100644 index 000000000000..5a66b3419a4d --- /dev/null +++ b/options/gitignore/SPFx @@ -0,0 +1,33 @@ +#SharePoint Framework (SPFx) +# Logs +logs +*.log +npm-debug.log* + +# Dependency directories +node_modules + +# Build generated files +dist +lib +solution +temp +*.sppkg + +# Coverage directory used by tools like istanbul +coverage + +# OSX +.DS_Store + +# Visual Studio files +.ntvs_analysis.dat +.vs +bin +obj + +# Resx Generated Code +*.resx.ts + +# Styles Generated Code +*.scss.ts diff --git a/options/gitignore/Scala b/options/gitignore/Scala index 9c07d4ae9884..7169cab19511 100644 --- a/options/gitignore/Scala +++ 
b/options/gitignore/Scala @@ -1,2 +1,5 @@ *.class *.log + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* diff --git a/options/gitignore/Strapi b/options/gitignore/Strapi new file mode 100644 index 000000000000..70e6542590a9 --- /dev/null +++ b/options/gitignore/Strapi @@ -0,0 +1,135 @@ +############################ +# OS X +############################ + +.DS_Store +.AppleDouble +.LSOverride +Icon +.Spotlight-V100 +.Trashes +._* + + +############################ +# Linux +############################ + +*~ + + +############################ +# Windows +############################ + +Thumbs.db +ehthumbs.db +Desktop.ini +$RECYCLE.BIN/ +*.cab +*.msi +*.msm +*.msp + + +############################ +# Packages +############################ + +*.7z +*.csv +*.dat +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip +*.com +*.class +*.dll +*.exe +*.o +*.seed +*.so +*.swo +*.swp +*.swn +*.swm +*.out +*.pid + + +############################ +# Logs and databases +############################ + +.tmp +*.log +*.sql +*.sqlite + + +############################ +# Misc. +############################ + +*# +.idea +nbproject +.vscode/ + + +############################ +# Node.js +############################ + +lib-cov +lcov.info +pids +logs +results +build +node_modules +.node_history +package-lock.json +**/package-lock.json +!docs/package-lock.json +*.heapsnapshot + + +############################ +# Tests +############################ + +testApp +coverage +cypress/screenshots +cypress/videos + + +############################ +# Documentation +############################ + +dist + +############################ +# Builds +############################ + +packages/strapi-generate-new/files/public/ + +############################ +# Example app +############################ + +.dev +# *.cache + +############################ +# Visual Studio Code +############################ + +front-workspace.code-workspace diff --git a/options/gitignore/TeX b/options/gitignore/TeX index 8a42ebbd98cd..237f49ebaa20 100644 --- a/options/gitignore/TeX +++ b/options/gitignore/TeX @@ -120,6 +120,7 @@ acs-*.bib # gregoriotex *.gaux +*.glog *.gtex # htlatex @@ -166,6 +167,9 @@ _minted* # morewrites *.mw +# newpax +*.newpax + # nomencl *.nlg *.nlo diff --git a/options/gitignore/TwinCAT3 b/options/gitignore/TwinCAT3 new file mode 100644 index 000000000000..7bd6f87505cb --- /dev/null +++ b/options/gitignore/TwinCAT3 @@ -0,0 +1,25 @@ +# gitignore template for TwinCAT3 +# website: https://www.beckhoff.com/twincat3/ +# +# Recommended: VisualStudio.gitignore + +# TwinCAT files +*.tpy +*.tclrs +*.compiled-library +*.compileinfo +# Don't include the tmc-file rule if either of the following is true: +# 1. You've got TwinCAT C++ projects, as the information in the TMC-file is created manually for the C++ projects (in that case, only (manually) ignore the tmc-files for the PLC projects) +# 2. You've created a standalone PLC-project and added events to it, as these are stored in the TMC-file. 
+*.tmc +*.tmcRefac +*.library +*.project.~u +*.tsproj.bak +*.xti.bak +LineIDs.dbg +LineIDs.dbg.bak +_Boot/ +_CompileInfo/ +_Libraries/ +_ModuleInstall/ \ No newline at end of file diff --git a/options/gitignore/Umbraco b/options/gitignore/Umbraco index c286845766db..5649531412a3 100644 --- a/options/gitignore/Umbraco +++ b/options/gitignore/Umbraco @@ -15,7 +15,7 @@ **/App_Data/umbraco.config ## this [Uu]mbraco/ folder should be created by cmd like `Install-Package UmbracoCms -Version 8.5.3` -## you can find your umbraco version at your Web.config. (i.e. ) +## you can find your Umbraco version in your Web.config. (i.e. ) ## Uncomment this line if you think it fits the way you work on your project. ## **/[Uu]mbraco/ @@ -29,4 +29,4 @@ **/App_Data/cache/ # Ignore the Models Builder models out of date flag -**/App_Data/Models/ood.flag +**/ood.flag diff --git a/options/gitignore/V b/options/gitignore/V new file mode 100644 index 000000000000..dbbb0462fbae --- /dev/null +++ b/options/gitignore/V @@ -0,0 +1,11 @@ +*.exe +*.o +*.so +*.tmp.c +*.exp +*.ilk +*.pdb +*.dll +*.lib +*.bak +*.out diff --git a/options/gitignore/VisualStudio b/options/gitignore/VisualStudio index 1ee53850b84c..34c8dee45388 100644 --- a/options/gitignore/VisualStudio +++ b/options/gitignore/VisualStudio @@ -90,6 +90,7 @@ StyleCopReport.xml *.tmp_proj *_wpftmp.csproj *.log +*.tlog *.vspscc *.vssscc .builds @@ -205,6 +206,9 @@ PublishScripts/ *.nuget.props *.nuget.targets +# Nuget personal access tokens and Credentials +nuget.config + # Microsoft Azure Build Output csx/ *.build.csdef @@ -360,3 +364,25 @@ MigrationBackup/ # Fody - auto-generated XML schema FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +.idea/ +*.sln.iml diff --git a/options/gitignore/Xojo b/options/gitignore/Xojo index 1b036dd4f2eb..4915783bf0d9 100644 --- a/options/gitignore/Xojo +++ b/options/gitignore/Xojo @@ -8,4 +8,4 @@ Debug*/Debug*.exe Debug*/Debug*\ Libs *.rbuistate *.xojo_uistate -*.obsolete +*.obsolete* diff --git a/options/license/0BSD b/options/license/0BSD index 72c7baf54c2f..0b8ae762b240 100644 --- a/options/license/0BSD +++ b/options/license/0BSD @@ -1,4 +1,4 @@ -Copyright (C) 2006 by Rob Landley +Copyright (C) YEAR by AUTHOR EMAIL Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted. diff --git a/options/license/BSD-3-Clause-No-Military-License b/options/license/BSD-3-Clause-No-Military-License new file mode 100644 index 000000000000..e06aa93b5139 --- /dev/null +++ b/options/license/BSD-3-Clause-No-Military-License @@ -0,0 +1,16 @@ +Copyright (c) year copyright holder. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. +Redistribution of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. +Redistribution in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. 
+Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +YOU ACKNOWLEDGE THAT THIS SOFTWARE IS NOT DESIGNED, LICENSED OR INTENDED FOR USE IN THE DESIGN, CONSTRUCTION, OPERATION OR MAINTENANCE OF ANY MILITARY FACILITY. diff --git a/options/license/CAL-1.0 b/options/license/CAL-1.0 index e0ccf819dc06..4cebc6d54df6 100644 --- a/options/license/CAL-1.0 +++ b/options/license/CAL-1.0 @@ -135,6 +135,11 @@ Code corresponding to the modifications in the Modified Work must be provided to the Recipient either a) under this License, or b) under a Compatible Open Source License. +A “Compatible Open Source License” means a license accepted by the Open Source +Initiative that allows object code created using both Source Code provided under +this License and Source Code provided under the other open source license to be +distributed together as a single work. + #### 4.1.3. Coordinated Disclosure of Security Vulnerabilities You may delay providing the Source Code corresponding to a particular diff --git a/options/license/CAL-1.0-Combined-Work-Exception b/options/license/CAL-1.0-Combined-Work-Exception index e0ccf819dc06..4cebc6d54df6 100644 --- a/options/license/CAL-1.0-Combined-Work-Exception +++ b/options/license/CAL-1.0-Combined-Work-Exception @@ -135,6 +135,11 @@ Code corresponding to the modifications in the Modified Work must be provided to the Recipient either a) under this License, or b) under a Compatible Open Source License. +A “Compatible Open Source License” means a license accepted by the Open Source +Initiative that allows object code created using both Source Code provided under +this License and Source Code provided under the other open source license to be +distributed together as a single work. + #### 4.1.3. Coordinated Disclosure of Security Vulnerabilities You may delay providing the Source Code corresponding to a particular diff --git a/options/license/CC-BY-2.5-AU b/options/license/CC-BY-2.5-AU new file mode 100644 index 000000000000..23b880091903 --- /dev/null +++ b/options/license/CC-BY-2.5-AU @@ -0,0 +1,112 @@ +Creative Commons Attribution 2.5 Australia + +CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS LICENCE DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE INFORMATION PROVIDED, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM ITS USE. + +Licence + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENCE ("CCPL" OR "LICENCE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. 
ANY USE OF THE WORK OTHER THAN AS AUTHORISED UNDER THIS LICENCE AND/OR APPLICABLE LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENCE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. + + 1. Definitions + + a. "Collective Work" means a work, such as a periodical issue, anthology or encyclopaedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this Licence. + + b. "Derivative Work" means a work that reproduces a substantial part of the Work, or of the Work and other pre-existing works protected by copyright, or that is an adaptation of a Work that is a literary, dramatic, musical or artistic work. Derivative Works include a translation, musical arrangement, dramatisation, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which a work may be adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this Licence. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this Licence. + + c. "Licensor" means the individual or entity that offers the Work under the terms of this Licence. + + d. "Moral rights law" means laws under which an individual who creates a work protected by copyright has rights of integrity of authorship of the work, rights of attribution of authorship of the work, rights not to have authorship of the work falsely attributed, or rights of a similar or analogous nature in the work anywhere in the world. + + e. "Original Author" means the individual or entity who created the Work. + + f. "Work" means the work or other subject-matter protected by copyright that is offered under the terms of this Licence, which may include (without limitation) a literary, dramatic, musical or artistic work, a sound recording or cinematograph film, a published edition of a literary, dramatic, musical or artistic work or a television or sound broadcast. + + g. "You" means an individual or entity exercising rights under this Licence who has not previously violated the terms of this Licence with respect to the Work, or who has received express permission from the Licensor to exercise rights under this Licence despite a previous violation. + + h. "Licence Elements" means the following high-level licence attributes as selected by Licensor and indicated in the title of this Licence: Attribution, NonCommercial, NoDerivatives, ShareAlike. + +2. Fair Dealing and Other Rights. Nothing in this Licence excludes or modifies, or is intended to exclude or modify, (including by reducing, limiting, or restricting) the rights of You or others to use the Work arising from fair dealings or other limitations on the rights of the copyright owner or the Original Author under copyright law, moral rights law or other applicable laws. + +3. Licence Grant. 
Subject to the terms and conditions of this Licence, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) licence to exercise the rights in the Work as stated below: + + a. to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; + + b. to create and reproduce Derivative Works; + + c. to publish, communicate to the public, distribute copies or records of, exhibit or display publicly, perform publicly and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; + + d. to publish, communicate to the public, distribute copies or records of, exhibit or display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works; + + e. For the avoidance of doubt, where the Work is a musical composition: + + i. Performance Royalties Under Blanket Licences. Licensor will not collect, whether individually or via a performance rights society, royalties for Your communication to the public, broadcast, public performance or public digital performance (e.g. webcast) of the Work. + + ii. Mechanical Rights and Statutory Royalties. Licensor will not collect, whether individually or via a music rights agency, designated agent or a music publisher, royalties for any record You create from the Work ("cover version") and distribute, subject to the compulsory licence created by 17 USC Section 115 of the US Copyright Act (or an equivalent statutory licence under the Australian Copyright Act or in other jurisdictions). + + + f. Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor will not collect, whether individually or via a performance-rights society, royalties for Your public digital performance (e.g. webcast) of the Work, subject to the compulsory licence created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). + +The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor under this Licence are hereby reserved. + +4. Restrictions. The licence granted in Section 3 above is expressly made subject to and limited by the following restrictions: + + a. You may publish, communicate to the public, distribute, publicly exhibit or display, publicly perform, or publicly digitally perform the Work only under the terms of this Licence, and You must include a copy of, or the Uniform Resource Identifier for, this Licence with every copy or record of the Work You publish, communicate to the public, distribute, publicly exhibit or display, publicly perform or publicly digitally perform. You may not offer or impose any terms on the Work that exclude, alter or restrict the terms of this Licence or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this Licence and to the disclaimer of representations and warranties. 
You may not publish, communicate to the public, distribute, publicly exhibit or display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this Licence. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this Licence. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by Section 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by Section 4(b), as requested. + + b. If you publish, communicate to the public, distribute, publicly exhibit or display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work. You must also give clear and reasonably prominent credit to (i) the Original Author (by name or pseudonym if applicable), if the name or pseudonym is supplied; and (ii) if another party or parties (eg a sponsor institute, publishing entity or journal) is designated for attribution in the copyright notice, terms of service or other reasonable means associated with the Work, such party or parties. If applicable, that credit must be given in the particular way made known by the Original Author and otherwise as reasonable to the medium or means You are utilizing, by conveying the identity of the Original Author and the other designated party or parties (if applicable); the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. + + c. False attribution prohibited. Except as otherwise agreed in writing by the Licensor, if You publish, communicate to the public, distribute, publicly exhibit or display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works in accordance with this Licence, You must not falsely attribute the Work to someone other than the Original Author. + + d. Prejudice to honour or reputation prohibited. Except as otherwise agreed in writing by the Licensor, if you publish, communicate to the public, distribute, publicly exhibit or display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must not do anything that results in a material distortion of, the mutilation of, or a material alteration to, the Work that is prejudicial to the Original Author's honour or reputation, and You must not do anything else in relation to the Work that is prejudicial to the Original Author's honour or reputation. 
+ +5. Disclaimer. + +EXCEPT AS EXPRESSLY STATED IN THIS LICENCE OR OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, AND TO THE FULL EXTENT PERMITTED BY APPLICABLE LAW, LICENSOR OFFERS THE WORK "AS-IS" AND MAKES NO REPRESENTATIONS, WARRANTIES OR CONDITIONS OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, ANY REPRESENTATIONS, WARRANTIES OR CONDITIONS REGARDING THE CONTENTS OR ACCURACY OF THE WORK, OR OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, THE ABSENCE OF LATENT OR OTHER DEFECTS, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. + +6. Limitation on Liability. + +TO THE FULL EXTENT PERMITTED BY APPLICABLE LAW, AND EXCEPT FOR ANY LIABILITY ARISING FROM CONTRARY MUTUAL AGREEMENT AS REFERRED TO IN SECTION 5, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, NEGLIGENCE) FOR ANY LOSS OR DAMAGE WHATSOEVER, INCLUDING (WITHOUT LIMITATION) LOSS OF PRODUCTION OR OPERATION TIME, LOSS, DAMAGE OR CORRUPTION OF DATA OR RECORDS; OR LOSS OF ANTICIPATED SAVINGS, OPPORTUNITY, REVENUE, PROFIT OR GOODWILL, OR OTHER ECONOMIC LOSS; OR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF OR IN CONNECTION WITH THIS LICENCE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +If applicable legislation implies warranties or conditions, or imposes obligations or liability on the Licensor in respect of this Licence that cannot be wholly or partly excluded, restricted or modified, the Licensor's liability is limited, to the full extent permitted by the applicable legislation, at its option, to: + + a. in the case of goods, any one or more of the following: + + i. the replacement of the goods or the supply of equivalent goods; + + ii. the repair of the goods; + + iii. the payment of the cost of replacing the goods or of acquiring equivalent goods; + + iv. the payment of the cost of having the goods repaired; or + + b. in the case of services: + + i. the supplying of the services again; or + + ii. the payment of the cost of having the services supplied again. + +7. Termination. + + a. This Licence and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this Licence. Individuals or entities who have received Derivative Works or Collective Works from You under this Licence, however, will not have their licences terminated provided such individuals or entities remain in full compliance with those licences. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this Licence. + + b. Subject to the above terms and conditions, the licence granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different licence terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this Licence (or any other licence that has been, or is required to be, granted under the terms of this Licence), and this Licence will continue in full force and effect unless terminated as stated above. + +8. Miscellaneous. + + a. Each time You publish, communicate to the public, distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a licence to the Work on the same terms and conditions as the licence granted to You under this Licence. + + b. 
Each time You publish, communicate to the public, distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a licence to the original Work on the same terms and conditions as the licence granted to You under this Licence. + + c. If any provision of this Licence is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Licence, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + + d. No term or provision of this Licence shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. + + e. This Licence constitutes the entire agreement between the parties with respect to the Work licensed here. To the full extent permitted by applicable law, there are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This Licence may not be modified without the mutual written agreement of the Licensor and You. + + f. The construction, validity and performance of this Licence shall be governed by the laws in force in New South Wales, Australia. + +Creative Commons is not a party to this Licence, and, to the full extent permitted by applicable law, makes no representation or warranty whatsoever in connection with the Work. To the full extent permitted by applicable law, Creative Commons will not be liable to You or any party on any legal theory (including, without limitation, negligence) for any damages whatsoever, including without limitation any general, special, incidental or consequential damages arising in connection to this licence. Notwithstanding the foregoing two (2) sentences, if Creative Commons has expressly identified itself as the Licensor hereunder, it shall have all rights and obligations of Licensor. + +Except for the limited purpose of indicating to the public that the Work is licensed under the CCPL, neither party will use the trademark "Creative Commons" or any related trademark or logo of Creative Commons without the prior written consent of Creative Commons. Any permitted use will be in compliance with Creative Commons' then-current trademark usage guidelines, as may be published on its website or otherwise made available upon request from time to time. + +Creative Commons may be contacted at https://creativecommons.org/. diff --git a/options/license/CDL-1.0 b/options/license/CDL-1.0 new file mode 100644 index 000000000000..e2990cde2db7 --- /dev/null +++ b/options/license/CDL-1.0 @@ -0,0 +1,53 @@ +Common Documentation License + +Version 1.0 - February 16, 2001 + +Copyright © 2001 Apple Computer, Inc. + +Permission is granted to copy and distribute verbatim copies of this License, but changing or adding to it in any way is not permitted. + +Please read this License carefully before downloading or using this material. By downloading or using this material, you are agreeing to be bound by the terms of this License. If you do not or cannot agree to the terms of this License, please do not download or use this material. + +0. Preamble. 
The Common Documentation License (CDL) provides a very simple and consistent license that allows relatively unrestricted use and redistribution of documents while still maintaining the author's credit and intent. To preserve simplicity, the License does not specify in detail how (e.g. font size) or where (e.g. title page, etc.) the author should be credited. To preserve consistency, changes to the CDL are not allowed and all derivatives of CDL documents are required to remain under the CDL. Together, these constraints enable third parties to easily and safely reuse CDL documents, making the CDL ideal for authors who desire a wide distribution of their work. However, this means the CDL does not allow authors to restrict precisely how their work is used or represented, making it inappropriate for those desiring more finely-grained control. + +1. General; Definitions. This License applies to any documentation, manual or other work that contains a notice placed by the Copyright Holder stating that it is subject to the terms of this Common Documentation License version 1.0 (or subsequent version thereof) ("License"). As used in this License: + +1.1 "Copyright Holder" means the original author(s) of the Document or other owner(s) of the copyright in the Document. + +1.2 "Document(s)" means any documentation, manual or other work that has been identified as being subject to the terms of this License. + +1.3 "Derivative Work" means a work which is based upon a pre-existing Document, such as a revision, modification, translation, abridgment, condensation, expansion, or any other form in which such pre-existing Document may be recast, transformed, or adapted. + +1.4 "You" or "Your" means an individual or a legal entity exercising rights under this License. + +2. Basic License. Subject to all the terms and conditions of this License, You may use, copy, modify, publicly display, distribute and publish the Document and your Derivative Works thereof, in any medium physical or electronic, commercially or non-commercially; provided that: (a) all copyright notices in the Document are preserved; (b) a copy of this License, or an incorporation of it by reference in proper form as indicated in Exhibit A below, is included in a conspicuous location in all copies such that it would be reasonably viewed by the recipient of the Document; and (c) You add no other terms or conditions to those of this License. + +3. Derivative Works. All Derivative Works are subject to the terms of this License. You may copy and distribute a Derivative Work of the Document under the conditions of Section 2 above, provided that You release the Derivative Work under the exact, verbatim terms of this License (i.e., the Derivative Work is licensed as a "Document" under the terms of this License). In addition, Derivative Works of Documents must meet the following requirements: + + (a) All copyright and license notices in the original Document must be preserved. + + (b) An appropriate copyright notice for your Derivative Work must be added adjacent to the other copyright notices. + + (c) A statement briefly summarizing how your Derivative Work is different from the original Document must be included in the same place as your copyright notice. + + (d) If it is not reasonably evident to a recipient of your Derivative Work that the Derivative Work is subject to the terms of this License, a statement indicating such fact must be included in the same place as your copyright notice. + +4. Compilation with Independent Works. 
You may compile or combine a Document or its Derivative Works with other separate and independent documents or works to create a compilation work ("Compilation"). If included in a Compilation, the Document or Derivative Work thereof must still be provided under the terms of this License, and the Compilation shall contain (a) a notice specifying the inclusion of the Document and/or Derivative Work and the fact that it is subject to the terms of this License, and (b) either a copy of the License or an incorporation by reference in proper form (as indicated in Exhibit A). Mere aggregation of a Document or Derivative Work with other documents or works on the same storage or distribution medium (e.g. a CD-ROM) will not cause this License to apply to those other works. + +5. NO WARRANTY. THE DOCUMENT IS PROVIDED 'AS IS' BASIS, WITHOUT WARRANTY OF ANY KIND, AND THE COPYRIGHT HOLDER EXPRESSLY DISCLAIMS ALL WARRANTIES AND/OR CONDITIONS WITH RESPECT TO THE DOCUMENT, EITHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES AND/OR CONDITIONS OF MERCHANTABILITY, OF SATISFACTORY QUALITY, OF FITNESS FOR A PARTICULAR PURPOSE, OF ACCURACY, OF QUIET ENJOYMENT, AND OF NONINFRINGEMENT OF THIRD PARTY RIGHTS. + +6. LIMITATION OF LIABILITY. UNDER NO CIRCUMSTANCES SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY INCIDENTAL, SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR RELATING TO THIS LICENSE OR YOUR USE, REPRODUCTION, MODIFICATION, DISTRIBUTION AND/OR PUBLICATION OF THE DOCUMENT, OR ANY PORTION THEREOF, WHETHER UNDER A THEORY OF CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES AND NOTWITHSTANDING THE FAILURE OF ESSENTIAL PURPOSE OF ANY REMEDY. + +7. Trademarks. This License does not grant any rights to use any names, trademarks, service marks or logos of the Copyright Holder (collectively "Marks") and no such Marks may be used to endorse or promote works or products derived from the Document without the prior written permission of the Copyright Holder. + +8. Versions of the License. Apple Computer, Inc. ("Apple") may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Once a Document has been published under a particular version of this License, You may continue to use it under the terms of that version. You may also choose to use such Document under the terms of any subsequent version of this License published by Apple. No one other than Apple has the right to modify the terms applicable to Documents created under this License. + +9. Termination. This License and the rights granted hereunder will terminate automatically if You fail to comply with any of its terms. Upon termination, You must immediately stop any further reproduction, modification, public display, distr ibution and publication of the Document and Derivative Works. However, all sublicenses to the Document and Derivative Works which have been properly granted prior to termination shall survive any termination of this License. Provisions which, by their nat ure, must remain in effect beyond the termination of this License shall survive, including but not limited to Sections 5, 6, 7, 9 and 10. + +10. Waiver; Severability; Governing Law. Failure by the Copyright Holder to enforce any provision of this License will not be deemed a waiver of future enforcement of that or any other provision. 
If for any reason a court of competent jurisdiction finds any provision of this License, or portion thereof, to be unenforceable, that provision of the License will be enforced to the maximum extent permissible so as to effect the economic benefits and intent of the parties, and the remainder of this License will continue in full force and effect. This License shall be governed by the laws of the United States and the State of California, except that body of California law concerning conflicts of law. + +EXHIBIT A + +The proper form for an incorporation of this License by reference is as follows: + +"Copyright (c) [year] by [Copyright Holder's name]. This material has been released under and is subject to the terms of the Common Documentation License, v.1.0, the terms of which are hereby incorporated by reference. Please obtain a copy of the License at http://www.opensource.apple.com/cdl/ and read it before using this material. Your use of this material signifies your agreement to the terms of the License." diff --git a/options/license/OPUBL-1.0 b/options/license/OPUBL-1.0 new file mode 100644 index 000000000000..1386621e0ff0 --- /dev/null +++ b/options/license/OPUBL-1.0 @@ -0,0 +1,78 @@ +Open Publication License + +v1.0, 8 June 1999 + +I. REQUIREMENTS ON BOTH UNMODIFIED AND MODIFIED VERSIONS + +The Open Publication works may be reproduced and distributed in whole or in part, in any medium physical or electronic, provided that the terms of this license are adhered to, and that this license or an incorporation of it by reference (with any options elected by the author(s) and/or publisher) is displayed in the reproduction. + +Proper form for an incorporation by reference is as follows: + + Copyright (c) by . This material may be distributed only subject to the terms and conditions set forth in the Open Publication License, vX.Y or later (the latest version is presently available at http://www.opencontent.org/openpub/). + +The reference must be immediately followed with any options elected by the author(s) and/or publisher of the document (see section VI). + +Commercial redistribution of Open Publication-licensed material is permitted. + +Any publication in standard (paper) book form shall require the citation of the original publisher and author. The publisher and author's names shall appear on all outer surfaces of the book. On all outer surfaces of the book the original publisher's name shall be as large as the title of the work and cited as possessive with respect to the title. + +II. COPYRIGHT + +The copyright to each Open Publication is owned by its author(s) or designee. + +III. SCOPE OF LICENSE + +The following license terms apply to all Open Publication works, unless otherwise explicitly stated in the document. + +Mere aggregation of Open Publication works or a portion of an Open Publication work with other works or programs on the same media shall not cause this license to apply to those other works. The aggregate work shall contain a notice specifying the inclusion of the Open Publication material and appropriate copyright notice. + +SEVERABILITY. If any part of this license is found to be unenforceable in any jurisdiction, the remaining portions of the license remain in force. + +NO WARRANTY. Open Publication works are licensed and provided "as is" without warranty of any kind, express or implied, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose or a warranty of non-infringement. + +IV. 
REQUIREMENTS ON MODIFIED WORKS + +All modified versions of documents covered by this license, including translations, anthologies, compilations and partial documents, must meet the following requirements: + 1. The modified version must be labeled as such. + 2. The person making the modifications must be identified and the modifications dated. + 3. Acknowledgement of the original author and publisher if applicable must be retained according to normal academic citation practices. + 4. The location of the original unmodified document must be identified. + 5. The original author's (or authors') name(s) may not be used to assert or imply endorsement of the resulting document without the original author's (or authors') permission. + +V. GOOD-PRACTICE RECOMMENDATIONS + +In addition to the requirements of this license, it is requested from and strongly recommended of redistributors that: + 1. If you are distributing Open Publication works on hardcopy or CD-ROM, you provide email notification to the authors of your intent to redistribute at least thirty days before your manuscript or media freeze, to give the authors time to provide updated documents. This notification should describe modifications, if any, made to the document. + 2. All substantive modifications (including deletions) be either clearly marked up in the document or else described in an attachment to the document. + 3. Finally, while it is not mandatory under this license, it is considered good form to offer a free copy of any hardcopy and CD-ROM expression of an Open Publication-licensed work to its author(s). + +VI. LICENSE OPTIONS + +The author(s) and/or publisher of an Open Publication-licensed document may elect certain options by appending language to the reference to or copy of the license. These options are considered part of the license instance and must be included with the license (or its incorporation by reference) in derived works. + +A. To prohibit distribution of substantively modified versions without the explicit permission of the author(s). "Substantive modification" is defined as a change to the semantic content of the document, and excludes mere changes in format or typographical corrections. + +To accomplish this, add the phrase `Distribution of substantively modified versions of this document is prohibited without the explicit permission of the copyright holder.' to the license reference or copy. + +B. To prohibit any publication of this work or derivative works in whole or in part in standard (paper) book form for commercial purposes is prohibited unless prior permission is obtained from the copyright holder. + +To accomplish this, add the phrase 'Distribution of the work or derivative of the work in any standard (paper) book form is prohibited unless prior permission is obtained from the copyright holder.' to the license reference or copy. + +OPEN PUBLICATION POLICY APPENDIX: + +(This is not considered part of the license.) + +Open Publication works are available in source format via the Open Publication home page at http://works.opencontent.org/. + +Open Publication authors who want to include their own license on Open Publication works may do so, as long as their terms are not more restrictive than the Open Publication license. + +If you have questions about the Open Publication License, please contact David Wiley, and/or the Open Publication Authors' List at opal@opencontent.org, via email. 
+ +To subscribe to the Open Publication Authors' List: +Send E-mail to opal-request@opencontent.org with the word "subscribe" in the body. + +To post to the Open Publication Authors' List: +Send E-mail to opal@opencontent.org or simply reply to a previous post. + +To unsubscribe from the Open Publication Authors' List: +Send E-mail to opal-request@opencontent.org with the word "unsubscribe" in the body. diff --git a/options/locale/locale_bg-BG.ini b/options/locale/locale_bg-BG.ini index e8bfc851cddf..6f61799f279f 100644 --- a/options/locale/locale_bg-BG.ini +++ b/options/locale/locale_bg-BG.ini @@ -82,6 +82,7 @@ loading=Зареждане… error404=Страницата, която се опитвате да достъпите, не съществува или не сте оторизирани да я достъпите. + [error] [startpage] @@ -249,7 +250,6 @@ openid_connect_submit=Свързване openid_connect_title=Свързване към съществуващ профил openid_register_title=Създай нов акаунт openid_signin_desc=Въведете вашето OpenID URI. Например: https://anne.me, bob.openid.org.cn или gnusocial.net/carry. -disable_forgot_password_mail=Възстановяването на профили е изключено. Моля обърнете се към администратора на сайта. authorize_application=Оторизиране на приложение authorize_redirect_notice=Ще бъдете пренасочени към %s ако оторизирате това приложение. authorize_application_created_by=Това приложение е създадено от %s. @@ -258,11 +258,18 @@ authorization_failed=Оторизацията беше неуспешна sspi_auth_failed=SSPI удостоверяването беше неуспешно [mail] + activate_account=Моля активирайте Вашия профил + activate_email=Провери адрес на ел. поща + +register_notify=Добре дошли в Gitea + reset_password=Възстановете акаунта си + register_success=Успешна регистрация -register_notify=Добре дошли в Gitea + + @@ -482,6 +489,7 @@ delete_account_title=Изтриване на потребителски акау email_notifications.disable=Изключване на известията по имейл + [repo] owner=Притежател repo_name=Име на хранилището @@ -751,6 +759,7 @@ issues.review.show_outdated=Покажи остарели issues.review.hide_outdated=Скрий остарели issues.assignee.error=Не всички изпълнители бяха добавени поради неочаквана грешка. + pulls.new=Нова заявка за сливане pulls.compare_base=слей в pulls.filter_branch=Филтър по клон @@ -771,7 +780,6 @@ pulls.merged_as=Тази заявка за сливане е била обеди pulls.is_closed=Тази заявка за сливане е затворена. pulls.has_merged=Тази заявка за сливане е обединена. pulls.title_wip_desc=`Започнете заглавието с %s, за да предотвратите случайно обединяване на заявката за сливане.` -pulls.cannot_merge_work_in_progress=Тази завка за сливане е отбелязане като работа в прогрес. Премахнете %s представката от заглавието, когато е готова pulls.can_auto_merge_desc=Може да се извърши обединяване на тази заявка за сливане. pulls.num_conflicting_files_1=%d конфликтен файл pulls.num_conflicting_files_n=%d конфликтни файлове diff --git a/options/locale/locale_cs-CZ.ini b/options/locale/locale_cs-CZ.ini index d58e7e737aac..0538e0f7b16b 100644 --- a/options/locale/locale_cs-CZ.ini +++ b/options/locale/locale_cs-CZ.ini @@ -93,6 +93,7 @@ step2=Krok 2: error404=Stránka, kterou se snažíte zobrazit, buď neexistuje, nebo nemáte oprávnění ji zobrazit. + [error] occurred=Nastala chyba report_message=Pokud jste si jisti, že se jedná o chybu Gitea, prosím vyhledejte problém na GitHub a v případě potřeby otevřete nový problém. 
@@ -206,6 +207,7 @@ default_enable_timetracking_popup=Povolí sledování času pro nové repozitá no_reply_address=Skrytá e-mailová doména no_reply_address_helper=Název domény pro uživatele se skrytou e-mailovou adresou. Příklad: Pokud je název skryté e-mailové domény nastaven na „noreply.example.org“, uživatelské jméno „joe“ bude zaznamenáno v Gitu jako „joe@noreply.example.org“. password_algorithm=Hash algoritmus hesla +password_algorithm_helper=Nastavte algoritmus hashování hesla. Algoritmy mají odlišné požadavky a sílu. `argon2` používá mnoho paměti a může být nevhodný pro malé systémy. [home] uname_holder=Uživatelské jméno nebo e-mailová adresa @@ -298,7 +300,6 @@ openid_connect_desc=Zvolené OpenID URI není známé. Přidružte nový účet openid_register_title=Vytvořit nový účet openid_register_desc=Zvolené OpenID URI není známé. Přidružte nový účet zde. openid_signin_desc=Zadejte své OpenID URI. Například: https://anne.me, bob.openid.org.cn nebo gnusocial.net/carry. -disable_forgot_password_mail=Obnovení účtu je zakázané. Prosíme, kontaktujte správce systému. email_domain_blacklisted=Nemůžete se registrovat s vaší e-mailovou adresou. authorize_application=Autorizovat aplikaci authorize_redirect_notice=Budete přesměrováni na %s, pokud autorizujete tuto aplikaci. @@ -312,14 +313,26 @@ password_pwned=Heslo, které jste zvolili, je na Migrovat repozitář. owner=Vlastník owner_helper=Některé organizace se nemusejí v seznamu zobrazit kvůli maximálnímu dosaženému počtu repozitářů. repo_name=Název repozitáře @@ -692,26 +707,39 @@ repo_desc=Popis repo_desc_helper=Zadejte krátký popis (volitelné) repo_lang=Jazyk repo_gitignore_helper=Vyberte šablony .gitignore. +repo_gitignore_helper_desc=Vyberte soubory, které nechcete sledovat ze seznamu šablon pro běžné jazyky. Typické artefakty generované nástroji pro sestavení každého jazyka jsou ve výchozím stavu součástí .gitignore. issue_labels=Štítky úkolů issue_labels_helper=Vyberte sadu štítků úkolů. license=Licence license_helper=Vyberte licenční soubor. +license_helper_desc=Licence řídí, co ostatní mohou a nemohou dělat s vaším kódem. Nejste si jisti, která je pro váš projekt správná? Podívejte se na Zvolte licenci readme=README readme_helper=Vyberte šablonu souboru README. +readme_helper_desc=Toto je místo, kde můžete napsat úplný popis vašeho projektu. auto_init=Inicializovat repozitář (Přidá .gitignore, License a README) +trust_model_helper=Vyberte model důvěry pro ověření podpisu. Možnosti jsou: trust_model_helper_collaborator=Spolupracovník: Důvěřovat podpisům spolupracovníků trust_model_helper_committer=Přispěvatel: Důvěřovat podpisům, které se shodují s přispěvateli +trust_model_helper_collaborator_committer=Spolupracovník+Tvůrce revize: Důvěřovat podpisům od spolupracovníků, které odpovídají tvůrci revize +trust_model_helper_default=Výchozí: Použít výchozí model důvěry pro tuto instalaci create_repo=Vytvořit repozitář default_branch=Výchozí větev +default_branch_helper=Výchozí větev je základní větev pro požadavky na natažení a revize kódu. mirror_prune=Vyčistit mirror_prune_desc=Odstranit zastaralé reference na vzdálené sledování mirror_interval=Interval zrcadlení (platné časové jednotky jsou „h“, „m“ a „s“). 0 zakáže automatickou synchronizaci. mirror_interval_invalid=Interval zrcadlení není platný. mirror_address=Klonovat z URL -mirror_address_desc=Zadejte nějaké přístupové údaje do sekce Ověření klonování. mirror_address_url_invalid=Poskytnutá URL je neplatná. Všechny komponenty musíte správně nahradit escape sekvencí. 
mirror_address_protocol_invalid=Zadaná URL je neplatná. Mohou být zrcadleny pouze umístění http(s):// nebo git://. +mirror_lfs=Úložiště velkých souborů (LFS) +mirror_lfs_desc=Aktivovat zrcadlení dat LFS. +mirror_lfs_endpoint=Koncový bod LFS +mirror_lfs_endpoint_desc=Synchronizace se pokusí použít URL pro klonování k určení LFS serveru. Můžete také zadat vlastní koncový bod, pokud jsou data LFS repozitáře uložena někde jinde. mirror_last_synced=Poslední synchronizace +mirror_password_placeholder=(Nezměněno) +mirror_password_blank_placeholder=(Nenastaveno) +mirror_password_help=Změňte uživatelské jméno pro vymazání uloženého hesla. watchers=Sledující stargazers=Sledující forks=Rozštěpení @@ -764,11 +792,15 @@ form.reach_limit_of_creation_n=Již jste dosáhli svůj limit %d repozitářů. form.name_reserved=Jméno repozitáře „%s“ je rezervované. form.name_pattern_not_allowed=Vzor „%s“ není povolený v názvu repozitáře. -need_auth=Ověření klonování migrate_options=Možnosti migrace migrate_service=Migrační služba migrate_options_mirror_helper=Tento repozitář bude zrcadlem migrate_options_mirror_disabled=Administrátor vašeho webu zakázal nová zrcadla. +migrate_options_lfs=Migrovat LFS soubory +migrate_options_lfs_endpoint.label=Koncový bod LFS +migrate_options_lfs_endpoint.description=Migrace se pokusí použít váš vzdálený Git pro určení LFS serveru. Můžete také zadat vlastní koncový bod, pokud jsou data LFS repozitáře uložena někde jinde. +migrate_options_lfs_endpoint.description.local=Podporována je také cesta k lokálnímu serveru. +migrate_options_lfs_endpoint.placeholder=Ponechte prázdné pro odvození z URL adresy pro klonování migrate_items=Položky pro migrování migrate_items_wiki=Wiki migrate_items_milestones=Milníky @@ -782,7 +814,10 @@ migrate.clone_address=Migrovat / klonovat z URL migrate.clone_address_desc=HTTP(S) nebo URL pro klonování existujícího repozitáře migrate.clone_local_path=nebo místní cesta serveru migrate.permission_denied=Není dovoleno importovat místní repozitáře. +migrate.permission_denied_blocked=Nemáte oprávnění provést import z blokovaných serverů. +migrate.permission_denied_private_ip=Nemáte oprávnění provést import ze soukromých IP adres. migrate.invalid_local_path=Místní cesta je neplatná, buď neexistuje nebo není adresářem. +migrate.invalid_lfs_endpoint=Koncový bod LFS není platný. migrate.failed=Přenesení selhalo: %v migrate.migrate_items_options=Pro migraci dalších položek je vyžadován přístupový token migrated_from=Migrováno z %[2]s @@ -827,6 +862,7 @@ branch=Větev tree=Strom clear_ref=`Vymazat aktuální referenci" filter_branch_and_tag=Filtr pro větev nebo značku +find_tag=Najít značku branches=Větve tags=Značky issues=Úkoly @@ -1096,6 +1132,8 @@ issues.context.edit=Upravit issues.context.delete=Smazat issues.no_content=Není zde žádný obsah. issues.close_issue=Zavřít +issues.pull_merged_at=`sloučil(a) revizi %[2]s do %[3]s %[4]s` +issues.manually_pull_merged_at=`sloučil(a) revizi %[2]s do %[3]s ručně %[4]s` issues.close_comment_issue=Okomentovat a zavřít issues.reopen_issue=Znovuotevřít issues.reopen_comment_issue=Okomentovat a znovuotevřít @@ -1117,6 +1155,8 @@ issues.re_request_review=Znovu požádat o posouzení issues.is_stale=Od tohoto posouzení došlo ke změnám v tomto požadavku na natažení issues.remove_request_review=Odstranit žádost o posouzení issues.remove_request_review_block=Nelze odstranit žádost o posouzení +issues.dismiss_review=Zamítnout posouzení +issues.dismiss_review_warning=Jste si jisti, že chcete zamítnout toto posouzení? 
 issues.sign_in_require_desc=Přihlaste se pro zapojení do konverzace.
 issues.edit=Upravit
 issues.cancel=Zrušit
@@ -1171,6 +1211,7 @@ issues.stop_tracking_history=`ukončil(a) práci %s`
 issues.cancel_tracking=Zahodit
 issues.cancel_tracking_history=`zrušil(a) sledování času %s`
 issues.add_time=Přidat čas ručně
+issues.del_time=Odstranit tento časový záznam
 issues.add_time_short=Přidat čas
 issues.add_time_cancel=Zrušit
 issues.add_time_history=`přidal(a) strávený čas %s`
@@ -1186,6 +1227,7 @@ issues.error_modifying_due_date=Změna termínu dokončení selhala.
 issues.error_removing_due_date=Odstranění termínu dokončení selhalo.
 issues.push_commit_1=přidal(a) %d revizi %s
 issues.push_commits_n=přidal(a) %d revize %s
+issues.force_push_codes=`vynucené nahrání %[1]s od %[2]s do %[4]s %[6]s`
 issues.due_date_form=rrrr-mm-dd
 issues.due_date_form_add=Přidat termín dokončení
 issues.due_date_form_edit=Upravit
@@ -1228,6 +1270,8 @@ issues.review.self.approval=Nemůžete schválit svůj požadavek na natažení.
 issues.review.self.rejection=Nemůžete požadovat změny ve svém vlastním požadavku na natažení.
 issues.review.approve=schválil tyto změny %s
 issues.review.comment=posoudil %s
+issues.review.dismissed=zamítl(a) posouzení od %s %s
+issues.review.dismissed_label=Zamítnuto
 issues.review.left_comment=zanechal komentář
 issues.review.content.empty=Je potřeba zanechat poznámku s uvedením požadované změny (požadovaných změn).
 issues.review.reject=požadované změny %s
@@ -1249,6 +1293,9 @@ issues.review.resolved_by=označil tuto konverzaci jako vyřešenou
 issues.assignee.error=Ne všichni zpracovatelé byli přidáni z důvodu neočekávané chyby.
 issues.reference_issue.body=Tělo zprávy
+compare.compare_base=základ
+compare.compare_head=porovnat
+
 pulls.desc=Povolit požadavky na natažení a posuzování kódu.
 pulls.new=Nový požadavek na natažení
 pulls.compare_changes=Nový požadavek na natažení
@@ -1258,6 +1305,7 @@ pulls.compare_compare=natáhnout z
 pulls.filter_branch=Filtrovat větev
 pulls.no_results=Nebyly nalezeny žádné výsledky.
 pulls.nothing_to_compare=Tyto větve jsou stejné. Není potřeba vytvářet požadavek na natažení.
+pulls.nothing_to_compare_and_allow_empty_pr=Tyto větve jsou stejné. Tento požadavek na natažení bude prázdný.
 pulls.has_pull_request=`Požadavek na natažení mezi těmito dvěma větvemi již existuje: %[2]s#%[3]d`
 pulls.create=Vytvořit požadavek na natažení
 pulls.title_desc=chce sloučit %[1]d revizí z větve %[2]s do %[3]s
@@ -1271,10 +1319,14 @@ pulls.cant_reopen_deleted_branch=Tento požadavek na natažení nemůže být zn
 pulls.merged=Sloučený
 pulls.merged_as=Požadavek na natažení byl sloučen jako %[2]s.
 pulls.manually_merged=Sloučeno ručně
+pulls.manually_merged_as=Požadavek na natažení byl ručně sloučen jako %[2]s.
 pulls.is_closed=Požadavek na natažení byl uzavřen.
 pulls.has_merged=Požadavek na natažení byl sloučen.
 pulls.title_wip_desc=`Začněte název s %s a zamezíte tak nechtěnému sloučení požadavku na natažení.`
-pulls.cannot_merge_work_in_progress=Požadavek na natažení je označen jako ve vývoji. Odstraňte %s prefix z titulku, až bude hotový
+pulls.cannot_merge_work_in_progress=Tento požadavek na natažení je označen jako probíhající práce.
+pulls.still_in_progress=Stále probíhá?
+pulls.add_prefix=Přidat prefix %s
+pulls.remove_prefix=Odstranit prefix %s
 pulls.data_broken=Tento požadavek na natažení je rozbitý kvůli chybějícím informacím o rozštěpení.
 pulls.files_conflicted=Tento požadavek na natažení obsahuje změny, které kolidují s cílovou větví.
 pulls.is_checking=Právě probíhá kontrola konfliktů při sloučení. Zkuste to za chvíli.
@@ -1299,6 +1351,7 @@ pulls.reject_count_1=%d žádost o změnu
 pulls.reject_count_n=%d žádosti o změnu
 pulls.waiting_count_1=%d čekající posouzení
 pulls.waiting_count_n=%d čekající posouzení
+pulls.wrong_commit_id=ID revize musí být ID revize v cílové větvi
 pulls.no_merge_desc=Tento požadavek na natažení nemůže být sloučen, protože všechny možnosti repozitáře na sloučení jsou zakázány.
 pulls.no_merge_helper=Povolte možnosti sloučení v nastavení repozitáře nebo proveďte sloučení požadavku na natažení ručně.
@@ -1310,6 +1363,7 @@ pulls.rebase_merge_pull_request=Rebase a sloučit
 pulls.rebase_merge_commit_pull_request=Rebase a sloučit (--no-ff)
 pulls.squash_merge_pull_request=Squash a sloučit
 pulls.merge_manually=Sloučeno ručně
+pulls.merge_commit_id=ID slučovací revize
 pulls.require_signed_wont_sign=Větev vyžaduje podepsané revize, ale toto sloučení nebude podepsáno
 pulls.invalid_merge_option=Nemůžete použít tuto možnost sloučení pro tento požadavek na natažení.
 pulls.merge_conflict=Sloučení selhalo: Došlo ke konfliktu při sloučení. Tip: Zkuste jinou strategii
@@ -1443,6 +1497,7 @@ activity.closed_issues_count_1=Uzavřený úkol
 activity.closed_issues_count_n=Uzavřené úkoly
 activity.title.issues_1=%d úkol
 activity.title.issues_n=%d úkolů
+activity.title.issues_closed_from=%s uzavřel z %s
 activity.title.issues_created_by=%s vytvořil %s
 activity.closed_issue_label=Uzavřený
 activity.new_issues_count_1=Nový úkol
@@ -1505,6 +1560,7 @@ settings.email_notifications.disable=Zakázat e-mailová oznámení
 settings.email_notifications.submit=Nastavit předvolby e-mailu
 settings.site=Webová stránka
 settings.update_settings=Aktualizovat nastavení
+settings.branches.update_default_branch=Aktualizovat výchozí větev
 settings.advanced_settings=Pokročilá nastavení
 settings.wiki_desc=Povolit Wiki repozitáře
 settings.use_internal_wiki=Používat vestavěnou Wiki
@@ -1532,6 +1588,8 @@ settings.pulls.allow_merge_commits=Povolit slučování revizí
 settings.pulls.allow_rebase_merge=Povolit rebase pro slučovací revize
 settings.pulls.allow_rebase_merge_commit=Povolit rebase s vyžádanou slučovací revizí (--no-ff)
 settings.pulls.allow_squash_commits=Povolit squash pro slučovací revize
+settings.pulls.allow_manual_merge=Povolit označování požadavků na natažení jako ručně sloučené
+settings.pulls.enable_autodetect_manual_merge=Povolit autodetekci ručních sloučení (Poznámka: V některých zvláštních případech může dojít k nesprávnému rozhodnutí)
 settings.projects_desc=Povolit projekty v repozitáři
 settings.admin_settings=Nastavení správce
 settings.admin_enable_health_check=Povolit kontrolu stavu repozitáře (git fsck)
@@ -1682,7 +1740,6 @@ settings.event_pull_request_review_desc=Požadavek na natažení schválen, odm
 settings.event_pull_request_sync=Požadavek na natažení synchronizován
 settings.event_pull_request_sync_desc=Požadavek na natažení synchronizován.
 settings.branch_filter=Filtr větví
-settings.branch_filter_desc=Povolené větve pro události nahrání, vytvoření větve a smazání větve jsou určeny pomocí zástupného vzoru. Pokud je prázdný nebo *, všechny události jsou ohlášeny. Podívejte se na dokumentaci syntaxe na github.com/gobwas/glob. Příklady: master, {master,release*}.
 settings.active=Aktivní
 settings.active_helper=Informace o spuštěných událostech budou odeslány na URL webového háčku.
 settings.add_hook_success=Webový háček byl přidán.
@@ -1752,7 +1809,6 @@ settings.dismiss_stale_approvals_desc=Pokud budou do větve nahrány nové reviz settings.require_signed_commits=Vyžadovat podepsané revize settings.require_signed_commits_desc=Odmítnout nahrání do této větve pokud nejsou podepsaná nebo jsou neověřitelná. settings.protect_protected_file_patterns=Chráněné vzory souborů (oddělené středníkem „\;“): -settings.protect_protected_file_patterns_desc=Chráněné soubory, které nemají povoleno být měněny přímo, i když uživatel má právo přidávat, upravovat nebo mazat soubory v této větvi. Více vzorů lze oddělit pomocí středníku („\;“). Podívejte se na github.com/gobwas/glob dokumentaci pro syntaxi vzoru. Příklady: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Zapnout ochranu settings.delete_protected_branch=Vypnout ochranu settings.update_protect_branch_success=Ochrana větví pro větev „%s“ byla aktualizována. @@ -1766,6 +1822,7 @@ settings.block_on_official_review_requests_desc=Slučování nebude možné, pok settings.block_outdated_branch=Blokovat sloučení, pokud je požadavek na natažení zastaralý settings.block_outdated_branch_desc=Slučování nebude možné, pokud je hlavní větev za základní větví. settings.default_branch_desc=Vybrat výchozí větev repozitáře pro požadavky na natažení a revize kódu: +settings.default_merge_style_desc=Výchozí styl sloučení pro požadavky na natažení: settings.choose_branch=Vyberte větev… settings.no_protected_branch=Nejsou tu žádné chráněné větve. settings.edit_protected_branch=Upravit @@ -1834,13 +1891,15 @@ diff.whitespace_ignore_at_eol=Ignorovat změny v bílých znacích na konci ří diff.stats_desc= %d změnil soubory, kde provedl %d přidání a %d odebrání diff.stats_desc_file=%d změn: %d přidání a %d smazání diff.bin=binární +diff.bin_not_shown=Binární soubor nebyl zobrazen. diff.view_file=Zobrazit soubor diff.file_before=Před diff.file_after=Za diff.file_image_width=Šířka diff.file_image_height=Výška diff.file_byte_size=Velikost -diff.file_suppressed=Diff nebyl zobrazen, protože je příliš veliký +diff.file_suppressed=Rozdílový obsah nebyl zobrazen, protože je příliš veliký +diff.file_suppressed_line_too_long=Rozdílový obsah nebyl zobrazen, protože některé řádky jsou příliš dlouhá diff.too_many_files=Některé soubory nejsou zobrazny, neboť je v této revizi změněno mnoho souborů diff.comment.placeholder=Zanechat komentář diff.comment.markdown_info=Je podporována úprava vzhledu pomocí markdown. @@ -1857,6 +1916,7 @@ diff.review.reject=Požadovat změny diff.committed_by=odevzdal diff.protected=Chráněno diff.image.side_by_side=Vedle sebe +diff.image.swipe=Posunout diff.image.overlay=Překrytí releases.desc=Sledování verzí projektu a souborů ke stažení. @@ -1867,6 +1927,7 @@ release.new_release=Nové vydání release.draft=Koncept release.prerelease=Předběžná verze release.stable=Stabilní +release.compare=Porovnat release.edit=upravit release.ahead.commits=%d revizí release.ahead.target=do %s od tohoto vydání @@ -1924,6 +1985,10 @@ branch.restore=Obnovit větev „%s“ branch.download=Stáhnout větev „%s“ branch.included_desc=Tato větev je součástí výchozí větve branch.included=Zahrnuje +branch.create_new_branch=Vytvořit větev z větve: +branch.confirm_create_branch=Vytvořit větev +branch.new_branch=Vytvořit novou větev +branch.new_branch_from=Vytvořit novou větev z „%s“ tag.create_tag=Vytvořit značku %s tag.create_success=Značka „%s“ byla vytvořena. 
@@ -1933,6 +1998,9 @@ topic.done=Hotovo
 topic.count_prompt=Nelze vybrat více než 25 témat
 topic.format_prompt=Téma musí začínat písmenem nebo číslem, může obsahovat pomlčky („-“) a může být dlouhé až 35 znaků.
+error.csv.too_large=Tento soubor nelze vykreslit, protože je příliš velký.
+error.csv.unexpected=Tento soubor nelze vykreslit, protože obsahuje neočekávaný znak na řádku %d ve sloupci %d.
+error.csv.invalid_field_count=Soubor nelze vykreslit, protože má nesprávný počet polí na řádku %d.
 [org]
 org_name_holder=Název organizace
@@ -2081,6 +2149,7 @@ dashboard.cron.error=Chyba v naplánované úloze: %s: %[3]s
 dashboard.cron.finished=Naplánovaná úloha: %[1]s skončila
 dashboard.delete_inactive_accounts=Smazat všechny neaktivované účty
 dashboard.delete_inactive_accounts.started=Spuštěna úloha mazání všech neaktivovaných účtů.
+dashboard.delete_repo_archives=Odstranit všechny archivy repozitáře (ZIP, TAR.GZ, atd.)
 dashboard.delete_repo_archives.started=Spuštěna úloha smazání všech archivovaných repozitářů.
 dashboard.delete_missing_repos=Smazat všechny repozitáře, které nemají Git soubory
 dashboard.delete_missing_repos.started=Spuštěna úloha mazání všech repozitářů, které nemají Git soubory.
@@ -2129,6 +2198,8 @@ dashboard.total_gc_time=Celková pauza GC
 dashboard.total_gc_pause=Celková pauza GC
 dashboard.last_gc_pause=Poslední pauza GC
 dashboard.gc_times=Časy GC
+dashboard.delete_old_actions=Odstranit všechny staré akce z databáze
+dashboard.delete_old_actions.started=Začalo odstraňování všech starých akcí z databáze.
 users.user_manage_panel=Správa uživatelských účtů
 users.new_account=Vytvořit uživatelský účet
@@ -2224,7 +2295,6 @@ auths.host=Server
 auths.port=Port
 auths.bind_dn=Připojení DN
 auths.bind_password=Heslo připojení
-auths.bind_password_helper=Upozornění: Toto heslo je ukládáno v nešifrované podobě. Použijte účet pouze pro čtení, pokud je to možné.
 auths.user_base=Výchozí bod hledání uživatelů
 auths.user_dn=DN uživatele
 auths.attribute_username=Atribut uživatelského jména
@@ -2255,6 +2325,7 @@ auths.allowed_domains_helper=Nechte prázdné k povolení všech domén. Oddělt
 auths.enable_tls=Povolit šifrování TLS
 auths.skip_tls_verify=Přeskočit ověření TLS
 auths.pam_service_name=Název služby PAM
+auths.pam_email_domain=PAM e-mailová doména (volitelné)
 auths.oauth2_provider=Poskytovatel OAuth2
 auths.oauth2_icon_url=URL ikony
 auths.oauth2_clientID=Klientské ID (klíč)
@@ -2354,6 +2425,7 @@ config.db_path=Cesta
 config.service_config=Nastavení služby
 config.register_email_confirm=Pro registraci vyžadovat potvrzení e-mailu
 config.disable_register=Vypnout možnost uživatelské registrace
+config.allow_only_internal_registration=Povolit registraci pouze prostřednictvím Gitea
 config.allow_only_external_registration=Povolit registraci pouze prostřednictvím externích služeb
 config.enable_openid_signup=Povolit automatickou registraci pomocí OpenID
 config.enable_openid_signin=Povolit přihlášení pomocí OpenID
@@ -2547,6 +2619,7 @@ mirror_sync_delete=synchronizoval(a) a smazal(a) referenci %[2]s v
 approve_pull_request=`schválil(a) %s#%[2]s`
 reject_pull_request=`navrhl(a) změny pro %s#%[2]s`
 publish_release=`vydána značka "%[4]s" v %[3]s`
+review_dismissed=`zamítl posouzení z %[4]s pro %[3]s#%[2]s`
 review_dismissed_reason=Důvod:
 create_branch=vytvořena větev %[3]s v %[4]s
diff --git a/options/locale/locale_de-DE.ini b/options/locale/locale_de-DE.ini
index 6b74fb9a8a49..bd6067b5cf5f 100644
--- a/options/locale/locale_de-DE.ini
+++ b/options/locale/locale_de-DE.ini
@@ -83,6 +83,7 @@ add=Hinzufügen
 add_all=Alle hinzufügen
 remove=Löschen
 remove_all=Alle entfernen
+edit=Bearbeiten
 write=Verfassen
 preview=Vorschau
@@ -91,11 +92,16 @@ loading=Laden…
 step1=Schritt 1:
 step2=Schritt 2:
+error=Fehler
 error404=Die Seite, die du gerade versuchst aufzurufen, existiert entweder nicht oder du bist nicht berechtigt, diese anzusehen.
+never=Niemals
+
 [error]
 occurred=Ein Fehler ist aufgetreten
 report_message=Wenn du dir sicher bist, dass dies ein Gitea-Fehler ist, suche bitte auf GitHub nach diesem Fehler und erstelle gegebenenfalls einen neuen Bugreport.
+missing_csrf=Fehlerhafte Anfrage: Kein CSRF Token verfügbar
+invalid_csrf=Fehlerhafte Anfrage: Ungültiger CSRF Token
 [startpage]
 app_desc=Ein einfacher, selbst gehosteter Git-Service
@@ -299,7 +305,8 @@ openid_connect_desc=Die gewählte OpenID-URI ist unbekannt. Ordne sie hier einem
 openid_register_title=Neues Konto einrichten
 openid_register_desc=Die gewählte OpenID-URI ist unbekannt. Ordne sie hier einem neuen Account zu.
 openid_signin_desc=Gib deine OpenID-URI ein. Zum Beispiel: https://anne.me, bob.openid.org.cn oder gnusocial.net/carry.
-disable_forgot_password_mail=Die Kontowiederherstellung ist deaktiviert. Bitte wende dich an den Administrator.
+disable_forgot_password_mail=Die Kontowiederherstellung ist deaktiviert, da keine E-Mail eingerichtet ist. Bitte kontaktiere den zuständigen Administrator.
+disable_forgot_password_mail_admin=Die Kontowiederherstellung ist nur verfügbar, wenn eine E-Mail eingerichtet wurde. Bitte richte eine E-Mail Adresse ein, um die Kontowiederherstellung freizuschalten.
 email_domain_blacklisted=Du kannst dich nicht mit deiner E-Mail-Adresse registrieren.
 authorize_application=Anwendung autorisieren
 authorize_redirect_notice=Du wirst zu %s weitergeleitet, wenn du diese Anwendung autorisierst.
@@ -313,19 +320,64 @@ password_pwned=Das von dir gewählte Passwort ist auf einer %s,
+
 activate_account=Bitte aktiviere dein Konto
+activate_account.title=%s, bitte aktiviere dein Konto
+activate_account.text_1=Hallo %[1]s, danke für deine Registrierung bei %[2]s!
+activate_account.text_2=Bitte klicke innerhalb von %s auf folgenden Link, um dein Konto zu aktivieren: + activate_email=Bestätige deine E-Mail-Adresse +activate_email.title=%s, bitte verifiziere deine E-Mail-Adresse +activate_email.text=Bitte klicke innerhalb von %s auf folgenden Link, um dein Konto zu aktivieren: + +register_notify=Willkommen bei Gitea +register_notify.title=%[1]s, willkommen bei %[2]s +register_notify.text_1=dies ist deine Bestätigungs-E-Mail für %s! +register_notify.text_2=Du kannst dich jetzt mit dem Benutzernamen "%s" anmelden. +register_notify.text_3=Wenn dieser Account von dir erstellt wurde, musst du zuerst dein Passwort setzen. + reset_password=Stelle dein Konto wieder her +reset_password.title=%s, du hast um Wiederherstellung deines Kontos gebeten +reset_password.text=Bitte klicke innerhalb von %s auf folgenden Link, um dein Konto wiederherzustellen: + register_success=Registrierung erfolgreich -register_notify=Willkommen bei Gitea + +issue_assigned.pull=@%[1]s hat dich im Repository %[3]s dem Pull Request %[2]s zugewiesen. +issue_assigned.issue=@%[1]s hat dich im Repository %[3]s dem Issue %[2]s zugewiesen. + +issue.x_mentioned_you=@%s hat dich erwähnt: +issue.action.force_push=%[1]s hat %[3]s mit %[4]s auf %[2]s überschrieben. +issue.action.push_1=@%[1]s hat einen Commit auf %[2]s gepusht +issue.action.push_n=@%[1]s hat %[3]d Commits auf %[2]s gepusht +issue.action.close=@%[1]s hat #%[2]d geschlossen. +issue.action.reopen=@%[1]s hat #%[2]d wieder geöffnet. +issue.action.merge=@%[1]s hat #%[2]d in %[3]s gemergt. +issue.action.approve=@%[1]s hat diesen Pull-Request approved. +issue.action.reject=@%[1]s hat Änderungen auf diesem Pull-Request angefordert. +issue.action.review=@%[1]s hat diesen Pull-Request kommentiert. +issue.action.review_dismissed=@%[1]s hat das letzte Review von %[2]s für diesen Pull Request verworfen. +issue.action.ready_for_review=@%[1]s hat diesen Pull Request zum Review freigegeben. +issue.action.new=@%[1]s hat #%[2]d geöffnet. +issue.in_tree_path=In %s: release.new.subject=Release %s in %s erschienen +release.new.text=@%[1]s hat %[2]s in %[3]s released +release.title=Titel: %s +release.note=Anmerkung: +release.downloads=Downloads: +release.download.zip=Quellcode (ZIP Datei) +release.download.targz=Quellcode (TAR.GZ Datei) repo.transfer.subject_to=%s möchte "%s" an %s übertragen repo.transfer.subject_to_you=%s möchte dir "%s" übertragen repo.transfer.to_you=dir +repo.transfer.body=Um es anzunehmen oder abzulehnen, öffne %s, oder ignoriere es einfach. repo.collaborator.added.subject=%s hat dich zu %s hinzugefügt +repo.collaborator.added.text=Du wurdest als Mitarbeiter für folgendes Repository hinzugefügt: [modal] yes=Ja @@ -366,6 +418,7 @@ email_error=` ist keine gültige E-Mail-Adresse.` url_error=` ist keine gültige URL.` include_error=` muss den Text „%s“ enthalten.` glob_pattern_error=` Der Glob Pattern ist ungültig: %s.` +regex_pattern_error=` regex ist ungültig: %s.` unknown_error=Unbekannter Fehler: captcha_incorrect=Der eingegebene CAPTCHA-Code ist falsch. password_not_match=Die Passwörter stimmen nicht überein. @@ -542,7 +595,20 @@ ssh_key_been_used=Dieser SSH-Key wird auf diesem Server bereits verwendet. ssh_key_name_used=Ein gleichnamiger SSH-Key existiert bereits in deinem Account. ssh_principal_been_used=Diese Identität ist bereits auf dem Server vorhanden. gpg_key_id_used=Ein öffentlicher GPG-Schlüssel mit der gleichen ID existiert bereits. 
-gpg_no_key_email_found=Dieser GPG-Schlüssel kann mit keiner E-Mail-Adresse deines Kontos verwendet werden. +gpg_no_key_email_found=Dieser GPG-Key entspricht keiner mit deinem Account verbundenen aktivierten E-Mail-Addresse. Er kann trotzdem hinzugefügt werden, wenn du den gegebenen Token signierst. +gpg_key_matched_identities=Passende Identitäten: +gpg_key_matched_identities_long=Die eingebetteten Identitäten in diesem Schlüssel stimmen mit den folgenden aktivierten E-Mail-Adressen für diesen Benutzer überein. Commits, die mit diesen E-Mail-Addressen committed wurden, können mit diesem Schlüssel verifiziert werden. +gpg_key_verified=Verifizierter Schlüssel +gpg_key_verified_long=Der Schlüssel wurde mit einem Token verifiziert. Er kann verwendet werden, um Commits zu verifizieren, die mit irgendeiner für diesen Nutzer aktivierten E-Mail-Adresse und irgendeiner Identität dieses Schlüssels übereinstimmen. +gpg_key_verify=Verifizieren +gpg_invalid_token_signature=Der GPG Key, die Signatur, und das Token stimmen nicht überein, oder das Token ist veraltet. +gpg_token_required=Du musst eine Signatur für das folgende Token angeben +gpg_token=Token +gpg_token_help=Du kannst eine Signatur wie folgt generieren: +gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=GPG Textsignatur (armored signature) +key_signature_gpg_placeholder=Beginnt mit '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=Der GPG-Key "%s" wurde verifiziert. subkeys=Unterschlüssel key_id=Schlüssel-ID key_name=Schlüsselname @@ -673,6 +739,14 @@ email_notifications.onmention=Nur E-Mail bei Erwähnung email_notifications.disable=E-Mail Benachrichtigungen deaktivieren email_notifications.submit=E-Mail-Einstellungen festlegen +visibility=Nutzer Sichtbarkeit +visibility.public=Öffentlich +visibility.public_tooltip=Für alle Nutzer sichtbar +visibility.limited=Begrenzt +visibility.limited_tooltip=Nur für eingeloggte Benutzer sichtbar +visibility.private=Privat +visibility.private_tooltip=Nur für Organisationsmitglieder sichtbar + [repo] new_repo_helper=Ein Repository enthält alle Projektdateien, einschließlich des Änderungsverlaufs. Schon woanders vorhanden? Migriere das Repository. owner=Besitzer @@ -723,7 +797,7 @@ mirror_prune_desc=Entferne veraltete remote-tracking Referenzen mirror_interval=Spiegel-Intervall (gültige Zeiteinheiten sind 'h', 'm', 's'). 0 schaltet die automatische Synchronisierung aus. mirror_interval_invalid=Das Spiegel-Intervall ist ungültig. mirror_address=Klonen via URL -mirror_address_desc=Gib alle erforderlichen Anmeldedaten im Abschnitt "Autorisierung klonen" ein. +mirror_address_desc=Gib alle erforderlichen Anmeldedaten im Abschnitt "Authentifizierung" ein. mirror_address_url_invalid=Die angegebene URL ist ungültig. Achte darauf, alle URL-Komponenten korrekt zu maskieren. mirror_address_protocol_invalid=Die angegebene URL ist ungültig. Nur Pfade beginnend mit http(s):// oder git:// können gespiegelt werden. mirror_lfs=Großdatei-Speicher (LFS) @@ -731,6 +805,9 @@ mirror_lfs_desc=Mirroring von LFS-Dateien aktivieren. mirror_lfs_endpoint=LFS-Endpunkt mirror_lfs_endpoint_desc=Sync wird versuchen, die Klon-URL zu verwenden, um den LFS-Server zu bestimmen. Du kannst auch einen eigenen Endpunkt angeben, wenn die LFS-Dateien woanders gespeichert werden. mirror_last_synced=Zuletzt synchronisiert +mirror_password_placeholder=(unverändert) +mirror_password_blank_placeholder=(Nicht gesetzt) +mirror_password_help=Ändere den Benutzernamen, um ein gespeichertes Passwort zu löschen. 
watchers=Beobachter stargazers=Favorisiert von forks=Forks @@ -747,6 +824,7 @@ delete_preexisting_label=Löschen delete_preexisting=Vorhandene Dateien löschen delete_preexisting_content=Dateien in %s löschen delete_preexisting_success=Nicht übernommene Dateien in %s gelöscht +blame_prior=Blame vor dieser Änderung anzeigen transfer.accept=Übertragung Akzeptieren transfer.accept_desc=Übertragung nach "%s" @@ -783,7 +861,7 @@ form.reach_limit_of_creation_n=Du hast bereits dein Limit von %d Repositories er form.name_reserved=Der Repository-Name „%s“ ist reserviert. form.name_pattern_not_allowed='%s' ist nicht erlaubt für Repository-Namen. -need_auth=Authentifizierung zum Klonen benötigt +need_auth=Authentifizierung migrate_options=Migrationsoptionen migrate_service=Migrationsdienst migrate_options_mirror_helper=Dieses Repository wird ein Mirror sein @@ -817,11 +895,19 @@ migrated_from_fake=Migriert von %[1]s migrate.migrate=Migrieren von %s migrate.migrating=Migriere von %s ... migrate.migrating_failed=Migrieren von %s fehlgeschlagen. +migrate.migrating_failed.error=Fehler: %s migrate.github.description=Migriere Daten von Github.com oder Github Enterprise. migrate.git.description=Migriere oder spiegele git-Daten von Git-Services migrate.gitlab.description=Migriere Daten von GitLab.com oder einem selbst gehostetem gitlab Server. migrate.gitea.description=Migriere Daten von Gitea.com oder einem selbst gehostetem Gitea Server. migrate.gogs.description=Migriere Daten von notabug.org oder einem anderen, selbst gehosteten Gogs Server. +migrate.migrating_git=Git Daten werden migriert +migrate.migrating_topics=Themen werden migriert +migrate.migrating_milestones=Meilensteine werden migriert +migrate.migrating_labels=Labels werden migriert +migrate.migrating_releases=Releases werden migriert +migrate.migrating_issues=Issues werden migriert +migrate.migrating_pulls=Pull Requests werden migriert mirror_from=Mirror von forked_from=geforkt von @@ -854,6 +940,7 @@ branch=Branch tree=Struktur clear_ref=`Aktuelle Referenz löschen` filter_branch_and_tag=Branch oder Tag filtern +find_tag=Tag finden branches=Branches tags=Tags issues=Issues @@ -1284,6 +1371,9 @@ issues.review.resolved_by=markierte diese Unterhaltung als gelöst issues.assignee.error=Aufgrund eines unerwarteten Fehlers konnten nicht alle Beauftragten hinzugefügt werden. issues.reference_issue.body=Beschreibung +compare.compare_base=Basis +compare.compare_head=vergleichen + pulls.desc=Pull-Requests und Code-Reviews aktivieren. pulls.new=Neuer Pull-Request pulls.compare_changes=Neuer Pull-Request @@ -1311,7 +1401,10 @@ pulls.manually_merged_as=Dieser Pull Request wurde manuell als Beginne den Titel mit %s um zu verhindern, dass der Pull Request versehentlich gemergt wird.` -pulls.cannot_merge_work_in_progress=Dieser Pull Request wurde als Work In Progress markiert. Entferne den %s-Präfix vom Titel, wenn dieser fertig ist. +pulls.cannot_merge_work_in_progress=Dieser Pull Request ist als Work in Progress markiert. +pulls.still_in_progress=Noch in Bearbeitung? +pulls.add_prefix=%s Präfix hinzufügen +pulls.remove_prefix=%s Präfix entfernen pulls.data_broken=Dieser Pull-Requests ist kaputt, da Fork-Informationen gelöscht wurden. pulls.files_conflicted=Dieser Pull-Request hat Änderungen, die im Widerspruch zum Ziel-Branch stehen. pulls.is_checking=Die Konfliktprüfung läuft noch. Bitte aktualisiere die Seite in wenigen Augenblicken. 
@@ -1537,6 +1630,15 @@ settings.hooks=Webhooks settings.githooks=Git-Hooks settings.basic_settings=Grundeinstellungen settings.mirror_settings=Mirror-Einstellungen +settings.mirror_settings.docs=Richte dein Projekt so ein, dass Änderungen automatisch in ein anderes Repository gepusht, oder aus einem anderen Repository gepullt werden. Branches, tags und commits werden dann automatisch synchronisiert. Wie kann ich ein Repository spiegeln? (Englisch) +settings.mirror_settings.mirrored_repository=Gespiegeltes Repository +settings.mirror_settings.direction=Richtung +settings.mirror_settings.direction.pull=Pull +settings.mirror_settings.direction.push=Push +settings.mirror_settings.last_update=Letzte Aktualisierung +settings.mirror_settings.push_mirror.none=Keine Push-Mirrors konfiguriert +settings.mirror_settings.push_mirror.remote_url=URL zum Git-Remote-Repository +settings.mirror_settings.push_mirror.add=Push-Mirror hinzufügen settings.sync_mirror=Jetzt synchronisieren settings.mirror_sync_in_progress=Mirror-Synchronisierung wird zurzeit ausgeführt. Komm in ein paar Minuten zurück. settings.email_notifications.enable=E-Mail Benachrichtigungen aktivieren @@ -1545,6 +1647,7 @@ settings.email_notifications.disable=E-Mail Benachrichtigungen deaktivieren settings.email_notifications.submit=E-Mail-Einstellungen festlegen settings.site=Webseite settings.update_settings=Einstellungen speichern +settings.branches.update_default_branch=Standardbranch aktualisieren settings.advanced_settings=Erweiterte Einstellungen settings.wiki_desc=Repository-Wiki aktivieren settings.use_internal_wiki=Eingebautes Wiki verwenden @@ -1574,6 +1677,7 @@ settings.pulls.allow_rebase_merge_commit=Rebasing mit expliziten merge commits a settings.pulls.allow_squash_commits=Mergen von Commits durch Squash aktivieren settings.pulls.allow_manual_merge=Manuelles Mergen von Pull Requests aktivieren settings.pulls.enable_autodetect_manual_merge=Autoerkennung von manuellen Merges aktivieren (in Ausnahmefällen können Fehleinschätzungen auftreten) +settings.pulls.default_delete_branch_after_merge=Standardmäßig bei Pull-Requests den Branch nach dem Mergen löschen settings.projects_desc=Repository-Projekte aktivieren settings.admin_settings=Administratoreinstellungen settings.admin_enable_health_check=Repository-Health-Checks aktivieren (git fsck) @@ -1601,6 +1705,7 @@ settings.transfer_form_title=Gib den Repository-Namen zur Bestätigung ein: settings.transfer_in_progress=Es gibt derzeit eine laufende Übertragung. Bitte brich diese ab, wenn du dieses Repository an einen anderen Benutzer übertragen möchtest. settings.transfer_notices_1=– Du wirst keinen Zugriff mehr haben, wenn der neue Besitzer ein individueller Benutzer ist. settings.transfer_notices_2=– Du wirst weiterhin Zugriff haben, wenn der neue Besitzer eine Organisation ist und du einer der Besitzer bist. +settings.transfer_notices_3=- Wenn das Repository privat ist und an einen einzelnen Benutzer übertragen wird, wird sichergestellt, dass der Benutzer mindestens Leserechte hat (und die Berechtigungen werden gegebenenfalls ändert). settings.transfer_owner=Neuer Besitzer settings.transfer_perform=Übertragung durchführen settings.transfer_started=Für dieses Repository wurde eine Übertragung eingeleitet und wartet nun auf die Bestätigung von "%s" @@ -1724,7 +1829,7 @@ settings.event_pull_request_review_desc=Pull-Request genehmigt, abgelehnt oder K settings.event_pull_request_sync=Pull-Request synchronisiert settings.event_pull_request_sync_desc=Pull-Request synchronisiert. 
settings.branch_filter=Branch-Filter -settings.branch_filter_desc=Branch-Whitelist für Push, Brancherstellung und Branchlöschung, als glob pattern. Ist dieser leer oder nur * angegeben, werden Ereignisse für alle Branches gemeldet. Siehe Dokumentation unter github.com/gobwas/glob für die Syntax. Beispiele: master, {master,release*}. +settings.branch_filter_desc=Whitelist für Branches für Push-, Erzeugungs- und Löschevents, als glob Pattern beschrieben. Es werden Events für alle Branches gemeldet, falls das Pattern * ist, oder falls es leer ist. Siehe die github.com/gobwas/glob Dokumentation für die Syntax (Englisch). Beispiele: master, {master,release*}. settings.active=Aktiv settings.active_helper=Informationen über ausgelöste Ereignisse werden an diese Webhook-URL gesendet. settings.add_hook_success=Webhook wurde hinzugefügt. @@ -1794,7 +1899,7 @@ settings.dismiss_stale_approvals_desc=Wenn neue Commits gepusht werden, die den settings.require_signed_commits=Signierte Commits erforderlich settings.require_signed_commits_desc=Pushes auf diesen Branch ablehnen, wenn Commits nicht signiert oder nicht überprüfbar sind. settings.protect_protected_file_patterns=Geschützte Dateimuster (durch Semikolon getrennt '\;'): -settings.protect_protected_file_patterns_desc=Geschützte Dateien, die nicht direkt geändert werden dürfen, auch wenn der Benutzer die Rechte hat, Dateien in diesem Branch hinzuzufügen, zu bearbeiten oder zu löschen. Mehrere Muster können mit Semikolon ('\;') getrennt werden. Siehe github.com/gobwas/glob Dokumentation für Mustersyntax. Beispiele: .drone.yml, /docs/**/*.txt. +settings.protect_protected_file_patterns_desc=Geschützte Dateien, die nicht einmal geändert werden können, wenn der Benutzer die Rechte hat, Dateien in diesem Branch hinzuzufügen, zu bearbeiten, oder zu löschen. Verschiedene Pattern können per Semicolon (';') getrennt werden. Siehe die github.com/gobwas/glob Dokumentation für die Pattern Syntax (Englisch). Beispiele: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Schutz aktivieren settings.delete_protected_branch=Schutz deaktivieren settings.update_protect_branch_success=Branch-Schutz für den Branch „%s“ wurde geändert. @@ -1813,6 +1918,16 @@ settings.choose_branch=Wähle einen Branch … settings.no_protected_branch=Es gibt keine geschützten Branches. settings.edit_protected_branch=Bearbeiten settings.protected_branch_required_approvals_min=Die Anzahl der erforderlichen Zustimmungen darf nicht negativ sein. +settings.tags=Tags +settings.tags.protection=Tag-Schutz +settings.tags.protection.pattern=Tag Muster +settings.tags.protection.allowed=Erlaubt +settings.tags.protection.allowed.users=Erlaubte Benutzer +settings.tags.protection.allowed.teams=Erlaubte Teams +settings.tags.protection.allowed.noone=Niemand +settings.tags.protection.create=Tag schützen +settings.tags.protection.none=Es gibt keine geschützten Tags. +settings.tags.protection.pattern.description=Du kannst einen einzigen Namen oder ein globales Schema oder einen regulären Ausdruck verwenden, um mehrere Tags zu schützen. Mehr dazu im geschützte Tags Guide (Englisch). settings.bot_token=Bot-Token settings.chat_id=Chat-ID settings.matrix.homeserver_url=Homeserver-URL @@ -1826,6 +1941,7 @@ settings.archive.success=Das Repo wurde erfolgreich archiviert. settings.archive.error=Beim Versuch, das Repository zu archivieren, ist ein Fehler aufgetreten. Weitere Details finden sich im Log. settings.archive.error_ismirror=Du kannst keinen Repo-Mirror archivieren. 
settings.archive.branchsettings_unavailable=Branch-Einstellungen sind nicht verfügbar wenn das Repo archiviert ist. +settings.archive.tagsettings_unavailable=Tag Einstellungen sind nicht verfügbar, wenn das Repo archiviert wurde. settings.unarchive.button=Archivieren rückgängig machen settings.unarchive.header=Archivieren dieses Repos rückgängig machen settings.unarchive.text=Durch das Aufheben der Archivierung kann das Repo wieder Commits und Pushes sowie neue Issues und Pull-Requests empfangen. @@ -1885,6 +2001,7 @@ diff.file_image_width=Breite diff.file_image_height=Höhe diff.file_byte_size=Größe diff.file_suppressed=Datei-Diff unterdrückt, da er zu groß ist +diff.file_suppressed_line_too_long=Dateidiff unterdrückt, weil mindestens eine Zeile zu lang ist diff.too_many_files=Einige Dateien werden nicht angezeigt, da zu viele Dateien in diesem Diff geändert wurden. diff.comment.placeholder=Kommentieren... diff.comment.markdown_info=Styling mit Markdown wird unterstützt. @@ -1912,6 +2029,7 @@ release.new_release=Neues Release release.draft=Entwurf release.prerelease=Pre-Release release.stable=Stabil +release.compare=Vergleichen release.edit=bearbeiten release.ahead.commits=%d Commits release.ahead.target=zu %s seit dieser Version @@ -1938,6 +2056,7 @@ release.deletion_tag_desc=Löscht dieses Tag aus dem Projektarchiv. Repository-I release.deletion_tag_success=Der Tag wurde gelöscht. release.tag_name_already_exist=Ein Release mit diesem Tag existiert bereits. release.tag_name_invalid=Der Tag-Name ist ungültig. +release.tag_name_protected=Der Tag-Name ist geschützt. release.tag_already_exist=Dieser Tag-Name existiert bereits. release.downloads=Downloads release.download_count=Downloads: %s @@ -1969,6 +2088,10 @@ branch.restore=Branch „%s“ wiederherstellen branch.download=Branch „%s“ herunterladen branch.included_desc=Dieser Branch ist im Standard-Branch enthalten branch.included=Enthalten +branch.create_new_branch=Branch aus Branch erstellen: +branch.confirm_create_branch=Branch erstellen +branch.new_branch=Neue Branch erstellen +branch.new_branch_from=Neue Branch von '%s' erstellen tag.create_tag=Tag %s erstellen tag.create_success=Tag "%s" wurde erstellt. @@ -2178,6 +2301,8 @@ dashboard.total_gc_time=Gesamte GC-Pause dashboard.total_gc_pause=Gesamte GC-Pause dashboard.last_gc_pause=Letzte GC-Pause dashboard.gc_times=Anzahl GC +dashboard.delete_old_actions=Alle alten Aktionen aus der Datenbank löschen +dashboard.delete_old_actions.started=Löschen aller alten Aktionen in der Datenbank gestartet. users.user_manage_panel=Benutzerkontenverwaltung users.new_account=Benutzerkonto erstellen @@ -2273,7 +2398,6 @@ auths.host=Host auths.port=Port auths.bind_dn=DN binden auths.bind_password=Passwort binden -auths.bind_password_helper=Achtung: Das Passwort wird im Klartext gespeichert. Benutze, wenn möglich, einen Account, der nur über Lesezugriff verfügt. auths.user_base=Basis für Benutzersuche auths.user_dn=Benutzer-DN auths.attribute_username=Benutzernamens-Attribut @@ -2304,6 +2428,7 @@ auths.allowed_domains_helper=Leer lassen, um alle Domains zuzulassen. 
Trenne meh auths.enable_tls=TLS-Verschlüsselung aktivieren auths.skip_tls_verify=TLS-Verifikation überspringen auths.pam_service_name=PAM-Dienstname +auths.pam_email_domain=PAM E-Mail-Domain (optional) auths.oauth2_provider=OAuth2-Anbieter auths.oauth2_icon_url=Icon URL auths.oauth2_clientID=Client-ID (Schlüssel) @@ -2403,6 +2528,7 @@ config.db_path=Verzeichnis config.service_config=Service-Konfiguration config.register_email_confirm=E-Mail-Bestätigung benötigt zum Registrieren config.disable_register=Selbstregistrierung deaktivieren +config.allow_only_internal_registration=Registrierung nur über Gitea selbst erlauben config.allow_only_external_registration=Registrierung nur über externe Services erlauben config.enable_openid_signup=OpenID-Selbstregistrierung aktivieren config.enable_openid_signin=OpenID-Anmeldung aktivieren diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 25b0a1b0bd33..0ead1dfd6d08 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -83,6 +83,7 @@ add = Add add_all = Add All remove = Remove remove_all = Remove All +edit = Edit write = Write preview = Preview @@ -91,11 +92,16 @@ loading = Loading… step1 = Step 1: step2 = Step 2: +error = Error error404 = The page you are trying to reach either does not exist or you are not authorized to view it. +never = Never + [error] occurred = An error has occurred report_message = If you are sure this is a Gitea bug, please search for issue on GitHub and open new issue if necessary. +missing_csrf = Bad Request: no CSRF token present +invalid_csrf = Bad Request: Invalid CSRF token [startpage] app_desc = A painless, self-hosted Git service @@ -299,7 +305,8 @@ openid_connect_desc = The chosen OpenID URI is unknown. Associate it with a new openid_register_title = Create new account openid_register_desc = The chosen OpenID URI is unknown. Associate it with a new account here. openid_signin_desc = Enter your OpenID URI. For example: https://anne.me, bob.openid.org.cn or gnusocial.net/carry. -disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. +disable_forgot_password_mail = Account recovery is disabled because no email is set up. Please contact your site administrator. +disable_forgot_password_mail_admin = Account recovery is only available when email is set up. Please set up email to enable account recovery. email_domain_blacklisted = You cannot register with your email address. authorize_application = Authorize Application authorize_redirect_notice = You will be redirected to %s if you authorize this application. @@ -308,25 +315,69 @@ authorize_application_description = If you grant the access, it will be able to authorize_title = Authorize "%s" to access your account? authorization_failed = Authorization failed authorization_failed_desc = The authorization failed because we detected an invalid request. Please contact the maintainer of the app you've tried to authorize. -disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. sspi_auth_failed = SSPI authentication failed password_pwned = The password you chose is on a list of stolen passwords previously exposed in public data breaches. Please try again with a different password. password_pwned_err = Could not complete request to HaveIBeenPwned [mail] +view_it_on = View it on %s +link_not_working_do_paste = Not working? Try copying and pasting it to your browser. 
+hi_user_x = Hi %s, + activate_account = Please activate your account +activate_account.title = %s, please activate your account +activate_account.text_1 = Hi %[1]s, thanks for registering at %[2]s! +activate_account.text_2 = Please click the following link to activate your account within %s: + activate_email = Verify your email address +activate_email.title = %s, please verify your e-mail address +activate_email.text = Please click the following link to verify your email address within %s: + +register_notify = Welcome to Gitea +register_notify.title = %[1]s, welcome to %[2]s +register_notify.text_1 = this is your registration confirmation email for %s! +register_notify.text_2 = You can now login via username: %s. +register_notify.text_3 = If this account has been created for you, please set your password first. + reset_password = Recover your account +reset_password.title = %s, you have requested to recover your account +reset_password.text = Please click the following link to recover your account within %s: + register_success = Registration successful -register_notify = Welcome to Gitea + +issue_assigned.pull = @%[1]s assigned you to the pull request %[2]s in repository %[3]s. +issue_assigned.issue = @%[1]s assigned you to the issue %[2]s in repository %[3]s. + +issue.x_mentioned_you = @%s mentioned you: +issue.action.force_push = %[1]s force-pushed the %[2]s from %[3]s to %[4]s. +issue.action.push_1 = @%[1]s pushed %[3]d commit to %[2]s +issue.action.push_n = @%[1]s pushed %[3]d commits to %[2]s +issue.action.close = @%[1]s closed #%[2]d. +issue.action.reopen = @%[1]s reopened #%[2]d. +issue.action.merge = @%[1]s merged #%[2]d into %[3]s. +issue.action.approve = @%[1]s approved this pull request. +issue.action.reject = @%[1]s requested changes on this pull request. +issue.action.review = @%[1]s commented on this pull request. +issue.action.review_dismissed = @%[1]s dismissed last review from %[2]s for this pull request. +issue.action.ready_for_review = @%[1]s marked this pull request ready for review. +issue.action.new = @%[1]s created #%[2]d. +issue.in_tree_path = In %s: release.new.subject = %s in %s released +release.new.text = @%[1]s released %[2]s in %[3]s +release.title = Title: %s +release.note = Note: +release.downloads = Downloads: +release.download.zip = Source Code (ZIP) +release.download.targz = Source Code (TAR.GZ) repo.transfer.subject_to = %s would like to transfer "%s" to %s repo.transfer.subject_to_you = %s would like to transfer "%s" to you repo.transfer.to_you = you +repo.transfer.body = To accept or reject it visit %s or just ignore it. repo.collaborator.added.subject = %s added you to %s +repo.collaborator.added.text = You have been added as a collaborator of repository: [modal] yes = Yes @@ -367,6 +418,7 @@ email_error = ` is not a valid email address.` url_error = ` is not a valid URL.` include_error = ` must contain substring '%s'.` glob_pattern_error = ` glob pattern is invalid: %s.` +regex_pattern_error = ` regex pattern is invalid: %s.` unknown_error = Unknown error: captcha_incorrect = The CAPTCHA code is incorrect. password_not_match = The passwords do not match. @@ -543,7 +595,20 @@ ssh_key_been_used = This SSH key has already been added to the server. ssh_key_name_used = An SSH key with same name already exists on your account. ssh_principal_been_used = This principal has already been added to the server. gpg_key_id_used = A public GPG key with same ID already exists. 
-gpg_no_key_email_found = This GPG key is not usable with any email address associated with your account. +gpg_no_key_email_found = This GPG key does not match any activated email address associated with your account. It may still be added if you sign the provided token. +gpg_key_matched_identities = Matched Identities: +gpg_key_matched_identities_long=The embedded identities in this key match the following activated email addresses for this user. Commits matching these email addresses can be verified with this key. +gpg_key_verified=Verified Key +gpg_key_verified_long=Key has been verified with a token and can be used to verify commits matching any activated email addresses for this user in addition to any matched identities for this key. +gpg_key_verify=Verify +gpg_invalid_token_signature = The provided GPG key, signature and token do not match or token is out-of-date. +gpg_token_required = You must provide a signature for the below token +gpg_token = Token +gpg_token_help = You can generate a signature using: +gpg_token_code = echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature = Armored GPG signature +key_signature_gpg_placeholder = Begins with '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success = The GPG key '%s' has been verified. subkeys = Subkeys key_id = Key ID key_name = Key Name @@ -674,6 +739,14 @@ email_notifications.onmention = Only Email on Mention email_notifications.disable = Disable Email Notifications email_notifications.submit = Set Email Preference +visibility = User visibility +visibility.public = Public +visibility.public_tooltip = Visible to all users +visibility.limited = Limited +visibility.limited_tooltip = Visible to logged in users only +visibility.private = Private +visibility.private_tooltip = Visible only to organization members + [repo] new_repo_helper = A repository contains all project files, including revision history. Already have it elsewhere? Migrate repository. owner = Owner @@ -724,7 +797,7 @@ mirror_prune_desc = Remove obsolete remote-tracking references mirror_interval = Mirror Interval (valid time units are 'h', 'm', 's'). 0 to disable automatic sync. mirror_interval_invalid = The mirror interval is not valid. mirror_address = Clone From URL -mirror_address_desc = Put any required credentials in the Clone Authorization section. +mirror_address_desc = Put any required credentials in the Authorization section. mirror_address_url_invalid = The provided url is invalid. You must escape all components of the url correctly. mirror_address_protocol_invalid = The provided url is invalid. Only http(s):// or git:// locations can be mirrored from. mirror_lfs = Large File Storage (LFS) @@ -732,6 +805,9 @@ mirror_lfs_desc = Activate mirroring of LFS data. mirror_lfs_endpoint = LFS Endpoint mirror_lfs_endpoint_desc = Sync will attempt to use the clone url to determine the LFS server. You can also specify a custom endpoint if the repository LFS data is stored somewhere else. mirror_last_synced = Last Synchronized +mirror_password_placeholder = (Unchanged) +mirror_password_blank_placeholder = (Unset) +mirror_password_help = Change the username to erase a stored password. 
watchers = Watchers stargazers = Stargazers forks = Forks @@ -748,6 +824,7 @@ delete_preexisting_label = Delete delete_preexisting = Delete pre-existing files delete_preexisting_content = Delete files in %s delete_preexisting_success = Deleted unadopted files in %s +blame_prior = View blame prior to this change transfer.accept = Accept Transfer transfer.accept_desc = Transfer to "%s" @@ -784,7 +861,7 @@ form.reach_limit_of_creation_n = You have already reached your limit of %d repos form.name_reserved = The repository name '%s' is reserved. form.name_pattern_not_allowed = The pattern '%s' is not allowed in a repository name. -need_auth = Clone Authorization +need_auth = Authorization migrate_options = Migration Options migrate_service = Migration Service migrate_options_mirror_helper = This repository will be a mirror @@ -818,11 +895,19 @@ migrated_from_fake = Migrated From %[1]s migrate.migrate = Migrate From %s migrate.migrating = Migrating from %s ... migrate.migrating_failed = Migrating from %s failed. +migrate.migrating_failed.error = Error: %s migrate.github.description = Migrating data from Github.com or Github Enterprise. migrate.git.description = Migrating or Mirroring git data from Git services migrate.gitlab.description = Migrating data from GitLab.com or Self-Hosted gitlab server. migrate.gitea.description = Migrating data from Gitea.com or Self-Hosted Gitea server. migrate.gogs.description = Migrating data from notabug.org or other Self-Hosted Gogs server. +migrate.migrating_git = Migrating Git Data +migrate.migrating_topics = Migrating Topics +migrate.migrating_milestones = Migrating Milestones +migrate.migrating_labels = Migrating Labels +migrate.migrating_releases = Migrating Releases +migrate.migrating_issues = Migrating Issues +migrate.migrating_pulls = Migrating Pull Requests mirror_from = mirror of forked_from = forked from @@ -855,6 +940,7 @@ branch = Branch tree = Tree clear_ref = `Clear current reference` filter_branch_and_tag = Filter branch or tag +find_tag = Find tag branches = Branches tags = Tags issues = Issues @@ -1285,6 +1371,9 @@ issues.review.resolved_by = marked this conversation as resolved issues.assignee.error = Not all assignees was added due to an unexpected error. issues.reference_issue.body = Body +compare.compare_base = base +compare.compare_head = compare + pulls.desc = Enable pull requests and code reviews. pulls.new = New Pull Request pulls.compare_changes = New Pull Request @@ -1312,7 +1401,10 @@ pulls.manually_merged_as = The pull request has been manually merged as Start the title with %s to prevent the pull request from being merged accidentally.` -pulls.cannot_merge_work_in_progress = This pull request is marked as a work in progress. Remove the %s prefix from the title when it's ready +pulls.cannot_merge_work_in_progress = This pull request is marked as a work in progress. +pulls.still_in_progress = Still in progress? +pulls.add_prefix = Add %s prefix +pulls.remove_prefix = Remove %s prefix pulls.data_broken = This pull request is broken due to missing fork information. pulls.files_conflicted = This pull request has changes conflicting with the target branch. pulls.is_checking = "Merge conflict checking is in progress. Try again in few moments." @@ -1538,6 +1630,15 @@ settings.hooks = Webhooks settings.githooks = Git Hooks settings.basic_settings = Basic Settings settings.mirror_settings = Mirror Settings +settings.mirror_settings.docs = Set up your project to automatically push and/or pull changes to/from another repository. 
Branches, tags, and commits will be synced automatically. How do I mirror repositories? +settings.mirror_settings.mirrored_repository = Mirrored repository +settings.mirror_settings.direction = Direction +settings.mirror_settings.direction.pull = Pull +settings.mirror_settings.direction.push = Push +settings.mirror_settings.last_update = Last update +settings.mirror_settings.push_mirror.none = No push mirrors configured +settings.mirror_settings.push_mirror.remote_url = Git Remote Repository URL +settings.mirror_settings.push_mirror.add = Add Push Mirror settings.sync_mirror = Synchronize Now settings.mirror_sync_in_progress = Mirror synchronization is in progress. Check back in a minute. settings.email_notifications.enable = Enable Email Notifications @@ -1546,6 +1647,7 @@ settings.email_notifications.disable = Disable Email Notifications settings.email_notifications.submit = Set Email Preference settings.site = Website settings.update_settings = Update Settings +settings.branches.update_default_branch = Update Default Branch settings.advanced_settings = Advanced Settings settings.wiki_desc = Enable Repository Wiki settings.use_internal_wiki = Use Built-In Wiki @@ -1575,6 +1677,7 @@ settings.pulls.allow_rebase_merge_commit = Enable Rebasing with explicit merge c settings.pulls.allow_squash_commits = Enable Squashing to Merge Commits settings.pulls.allow_manual_merge = Enable Mark PR as manually merged settings.pulls.enable_autodetect_manual_merge = Enable autodetect manual merge (Note: In some special cases, misjudgments can occur) +settings.pulls.default_delete_branch_after_merge = Delete pull request branch after merge by default settings.projects_desc = Enable Repository Projects settings.admin_settings = Administrator Settings settings.admin_enable_health_check = Enable Repository Health Checks (git fsck) @@ -1602,6 +1705,7 @@ settings.transfer_form_title = Enter the repository name as confirmation: settings.transfer_in_progress = There is currently an ongoing transfer. Please cancel it if you will like to transfer this repository to another user. settings.transfer_notices_1 = - You will lose access to the repository if you transfer it to an individual user. settings.transfer_notices_2 = - You will keep access to the repository if you transfer it to an organization that you (co-)own. +settings.transfer_notices_3 = - If the repository is private and is transferred to an individual user, this action makes sure that the user does have at least read permission (and changes permissions if necessary). settings.transfer_owner = New Owner settings.transfer_perform = Perform Transfer settings.transfer_started = This repository has been marked for transfer and awaits confirmation from "%s" @@ -1725,7 +1829,7 @@ settings.event_pull_request_review_desc = Pull request approved, rejected, or re settings.event_pull_request_sync = Pull Request Synchronized settings.event_pull_request_sync_desc = Pull request synchronized. settings.branch_filter = Branch filter -settings.branch_filter_desc = Branch whitelist for push, branch creation and branch deletion events, specified as glob pattern. If empty or *, events for all branches are reported. See github.com/gobwas/glob documentation for syntax. Examples: master, {master,release*}. +settings.branch_filter_desc = Branch whitelist for push, branch creation and branch deletion events, specified as glob pattern. If empty or *, events for all branches are reported. See github.com/gobwas/glob documentation for syntax. Examples: master, {master,release*}. 
settings.active = Active settings.active_helper = Information about triggered events will be sent to this webhook URL. settings.add_hook_success = The webhook has been added. @@ -1795,7 +1899,7 @@ settings.dismiss_stale_approvals_desc = When new commits that change the content settings.require_signed_commits = Require Signed Commits settings.require_signed_commits_desc = Reject pushes to this branch if they are unsigned or unverifiable. settings.protect_protected_file_patterns = Protected file patterns (separated using semicolon '\;'): -settings.protect_protected_file_patterns_desc = Protected files that are not allowed to be changed directly even if user has rights to add, edit, or delete files in this branch. Multiple patterns can be separated using semicolon ('\;'). See github.com/gobwas/glob documentation for pattern syntax. Examples: .drone.yml, /docs/**/*.txt. +settings.protect_protected_file_patterns_desc = Protected files that are not allowed to be changed directly even if user has rights to add, edit, or delete files in this branch. Multiple patterns can be separated using semicolon ('\;'). See github.com/gobwas/glob documentation for pattern syntax. Examples: .drone.yml, /docs/**/*.txt. settings.add_protected_branch = Enable protection settings.delete_protected_branch = Disable protection settings.update_protect_branch_success = Branch protection for branch '%s' has been updated. @@ -1814,6 +1918,16 @@ settings.choose_branch = Choose a branch… settings.no_protected_branch = There are no protected branches. settings.edit_protected_branch = Edit settings.protected_branch_required_approvals_min = Required approvals cannot be negative. +settings.tags = Tags +settings.tags.protection = Tag Protection +settings.tags.protection.pattern = Tag Pattern +settings.tags.protection.allowed = Allowed +settings.tags.protection.allowed.users = Allowed users +settings.tags.protection.allowed.teams = Allowed teams +settings.tags.protection.allowed.noone = No One +settings.tags.protection.create = Protect Tag +settings.tags.protection.none = There are no protected tags. +settings.tags.protection.pattern.description = You can use a single name or a glob pattern or regular expression to match multiple tags. Read more in the protected tags guide. settings.bot_token = Bot Token settings.chat_id = Chat ID settings.matrix.homeserver_url = Homeserver URL @@ -1827,6 +1941,7 @@ settings.archive.success = The repo was successfully archived. settings.archive.error = An error occurred while trying to archive the repo. See the log for more details. settings.archive.error_ismirror = You cannot archive a mirrored repo. settings.archive.branchsettings_unavailable = Branch settings are not available if the repo is archived. +settings.archive.tagsettings_unavailable = Tag settings are not available if the repo is archived. settings.unarchive.button = Un-Archive Repo settings.unarchive.header = Un-Archive This Repo settings.unarchive.text = Un-Archiving the repo will restore its ability to receive commits and pushes, as well as new issues and pull-requests. 
@@ -1886,6 +2001,7 @@ diff.file_image_width = Width diff.file_image_height = Height diff.file_byte_size = Size diff.file_suppressed = File diff suppressed because it is too large +diff.file_suppressed_line_too_long = File diff suppressed because one or more lines are too long diff.too_many_files = Some files were not shown because too many files changed in this diff diff.comment.placeholder = Leave a comment diff.comment.markdown_info = Styling with markdown is supported. @@ -1913,6 +2029,7 @@ release.new_release = New Release release.draft = Draft release.prerelease = Pre-Release release.stable = Stable +release.compare = Compare release.edit = edit release.ahead.commits = %d commits release.ahead.target = to %s since this release @@ -1939,6 +2056,7 @@ release.deletion_tag_desc = Will delete this tag from repository. Repository con release.deletion_tag_success = The tag has been deleted. release.tag_name_already_exist = A release with this tag name already exists. release.tag_name_invalid = The tag name is not valid. +release.tag_name_protected = The tag name is protected. release.tag_already_exist = This tag name already exists. release.downloads = Downloads release.download_count = Downloads: %s @@ -1970,6 +2088,10 @@ branch.restore = Restore Branch '%s' branch.download = Download Branch '%s' branch.included_desc = This branch is part of the default branch branch.included = Included +branch.create_new_branch = Create branch from branch: +branch.confirm_create_branch = Create branch +branch.new_branch = Create new branch +branch.new_branch_from = Create new branch from '%s' tag.create_tag = Create tag %s tag.create_success = Tag '%s' has been created. @@ -2276,7 +2398,6 @@ auths.host = Host auths.port = Port auths.bind_dn = Bind DN auths.bind_password = Bind Password -auths.bind_password_helper = Warning: This password is stored in plain text. Use a read-only account if possible. auths.user_base = User Search Base auths.user_dn = User DN auths.attribute_username = Username Attribute @@ -2307,6 +2428,7 @@ auths.allowed_domains_helper = Leave empty to allow all domains. Separate multip auths.enable_tls = Enable TLS Encryption auths.skip_tls_verify = Skip TLS Verify auths.pam_service_name = PAM Service Name +auths.pam_email_domain = PAM Email Domain (optional) auths.oauth2_provider = OAuth2 Provider auths.oauth2_icon_url = Icon URL auths.oauth2_clientID = Client ID (Key) @@ -2406,6 +2528,7 @@ config.db_path = Path config.service_config = Service Configuration config.register_email_confirm = Require Email Confirmation to Register config.disable_register = Disable Self-Registration +config.allow_only_internal_registration = Allow Registration Only Through Gitea itself config.allow_only_external_registration = Allow Registration Only Through External Services config.enable_openid_signup = Enable OpenID Self-Registration config.enable_openid_signin = Enable OpenID Sign-In diff --git a/options/locale/locale_es-ES.ini b/options/locale/locale_es-ES.ini index dfb07ff9f901..2a10e289ed0b 100644 --- a/options/locale/locale_es-ES.ini +++ b/options/locale/locale_es-ES.ini @@ -83,6 +83,7 @@ add=Añadir add_all=Añadir todo remove=Eliminar remove_all=Eliminar todos +edit=Editar write=Escribir preview=Vista previa @@ -91,11 +92,16 @@ loading=Cargando… step1=Paso 1: step2=Paso 2: +error=Error error404=La página a la que está intentando acceder o no existe o no está autorizado para verla. 
+never=Nunca + [error] occurred=Se ha producido un error report_message=Si estás seguro de que este es un error de Gitea, por favor busca un problema en GitHub y abre un nuevo problema si es necesario. +missing_csrf=Solicitud incorrecta: sin token CSRF +invalid_csrf=Solicitud incorrecta: el token CSRF no es válido [startpage] app_desc=Un servicio de Git autoalojado y sin complicaciones @@ -269,8 +275,8 @@ account_activated=La cuenta ha sido activada prohibit_login=Ingreso prohibido prohibit_login_desc=Su cuenta tiene prohibido ingresar al sistema. Por favor contacte con el administrador del sistema. resent_limit_prompt=Ya ha solicitado recientemente un correo de activación. Por favor, espere 3 minutos y vuelva a intentarlo. -has_unconfirmed_mail=Hola %s, tu correo electrónico (%s) no está confirmado. Si no has recibido un correo de confirmación o necesitas que lo enviemos de nuevo, por favor, haz click en el siguiente botón. -resend_mail=Haz click aquí para reenviar tu correo electrónico de activación +has_unconfirmed_mail=Hola %s, su correo electrónico (%s) no está confirmado. Si no ha recibido un correo de confirmación o necesita que lo enviemos de nuevo, por favor, haga click en el siguiente botón. +resend_mail=Haga click aquí para reenviar su correo electrónico de activación email_not_associate=Esta dirección de correo electrónico no esta asociada a ninguna cuenta. send_reset_mail=Enviar correo de recuperación de cuenta reset_password=Recuperación de cuenta @@ -299,7 +305,8 @@ openid_connect_desc=La URI OpenID elegida es desconocida. Asóciela a una nueva openid_register_title=Crear una nueva cuenta openid_register_desc=La URI OpenID elegida es desconocida. Asóciela a una nueva cuenta aquí. openid_signin_desc=Introduzca su URI OpenID. Por ejemplo: https://anne.me, bob.openid.org.cn o gnusocial.net/carry. -disable_forgot_password_mail=La recuperación de cuentas está desactivada. Por favor, contacte con el administrador del sitio. +disable_forgot_password_mail=La recuperación de cuentas está desactivada porque no hay correo electrónico configurado. Por favor, contacte con el administrador del sitio. +disable_forgot_password_mail_admin=La recuperación de cuentas solo está disponible cuando se configura el correo electrónico configurado. Por favor, configure el correo electrónico para permitir la recuperación de cuentas. email_domain_blacklisted=No puede registrarse con su correo electrónico. authorize_application=Autorizar aplicación authorize_redirect_notice=Será redirigido a %s si autoriza esta aplicación. @@ -313,19 +320,64 @@ password_pwned=La contraseña que eligió está en una establezca su contraseña primero. + reset_password=Recupere su cuenta +reset_password.title=%s, has solicitado recuperar tu cuenta +reset_password.text=Haga clic en el siguiente enlace para recuperar su cuenta dentro de %s: + register_success=Registro completado -register_notify=¡Bienvenido a Gitea + +issue_assigned.pull=@%[1]s le asignó al pull request %[2]s en el repositorio %[3]s. +issue_assigned.issue=@%[1]s le asignó a la incidencia %[2]s en el repositorio %[3]s. + +issue.x_mentioned_you=@%s te mencionó: +issue.action.force_push=%[1]s empujó a la fuerza el %[2]s de %[3]s a %[4]s. +issue.action.push_1=@%[1]s hizo %[3]d commit al %[2]s +issue.action.push_n=@%[1]s push %[3]d commits a %[2]s +issue.action.close=@%[1]s cerró #%[2]d. +issue.action.reopen=@%[1]s reabrió #%[2]d. +issue.action.merge=@%[1]s fusionó #%[2]d en %[3]s. +issue.action.approve=@%[1]s aprobó este pull request. 
+issue.action.reject=@%[1]s solicitó cambios en este pull request. +issue.action.review=@%[1]s comentó en este pull request. +issue.action.review_dismissed=@%[1]s descartó la última revisión de %[2]s para este pull request. +issue.action.ready_for_review=@%[1]s marcó este pull request listo para ser revisado. +issue.action.new=@%[1]s creó #%[2]d. +issue.in_tree_path=En %s: release.new.subject=%s en %s publicado +release.new.text=@%[1]s lanzó %[2]s en %[3]s +release.title=Título: %s +release.note=Nota: +release.downloads=Descargas: +release.download.zip=Código fuente (ZIP) +release.download.targz=Código fuente (TAR.GZ) repo.transfer.subject_to=%s desea transferir "%s" a %s repo.transfer.subject_to_you=%s desea transferir "%s" a usted repo.transfer.to_you=usted +repo.transfer.body=Para aceptarlo o rechazarlo, visita %s o simplemente ignórelo. repo.collaborator.added.subject=%s le añadió en %s +repo.collaborator.added.text=Has sido añadido como colaborador del repositorio: [modal] yes=Sí @@ -366,6 +418,7 @@ email_error=` no es una dirección de correo válida.` url_error=` no es una URL válida.` include_error=` debe contener la subcadena '%s'.` glob_pattern_error=` el patrón globo no es válido: %s.` +regex_pattern_error=` el patrón de regex no es válido: %s.` unknown_error=Error desconocido: captcha_incorrect=El código CAPTCHA no es correcto. password_not_match=Las contraseñas no coinciden. @@ -542,7 +595,20 @@ ssh_key_been_used=Esta clave SSH ya ha sido añadida al servidor. ssh_key_name_used=Una clave SSH con el mismo nombre ya ha sido añadida a su cuenta. ssh_principal_been_used=Este principal ya ha sido añadido al servidor. gpg_key_id_used=Ya existe una clave GPG pública con el mismo ID. -gpg_no_key_email_found=Esta clave GPG no es usable con ninguna de las direcciones de correo electrónico asociadas con su cuenta. +gpg_no_key_email_found=Esta clave GPG no coincide con ninguna dirección de correo electrónico activada asociada a su cuenta. Todavía puede ser añadida si firma el token proporcionado. +gpg_key_matched_identities=Identidades coincidentes: +gpg_key_matched_identities_long=Las identidades incrustadas en esta clave coinciden con las siguientes direcciones de correo electrónico activadas para este usuario. Los commits que coincidan con estas direcciones de correo electrónico pueden ser verificados con esta clave. +gpg_key_verified=Clave verificada +gpg_key_verified_long=La clave ha sido verificada con un token y puede ser usada para verificar confirmaciones que coincidan con cualquier dirección de correo electrónico activada para este usuario, además de cualquier identidad coincidente para esta clave. +gpg_key_verify=Verificar +gpg_invalid_token_signature=La clave GPG proporcionada, la firma y el token no coinciden o el token está desactualizado. +gpg_token_required=Debe proporcionar una firma para el token de abajo +gpg_token=Token +gpg_token_help=Puede generar una firma de la siguiente manera: +gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=Firma GPG armadura +key_signature_gpg_placeholder=Comienza con '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=La clave GPG '%s' ha sido verificada. 
subkeys=Subclaves key_id=ID de clave key_name=Nombre de la Clave @@ -673,6 +739,14 @@ email_notifications.onmention=Enviar correo sólo al ser mencionado email_notifications.disable=Deshabilitar las notificaciones por correo electrónico email_notifications.submit=Establecer preferencias de correo electrónico +visibility=Visibilidad del usuario +visibility.public=Público +visibility.public_tooltip=Visible para todos los usuarios +visibility.limited=Limitado +visibility.limited_tooltip=Visible sólo para usuarios conectados +visibility.private=Privado +visibility.private_tooltip=Sólo visible para los miembros de la organización + [repo] new_repo_helper=Un repositorio contiene todos los archivos del proyecto, incluyendo el historial de revisiones. ¿Ya lo tiene en otro lugar? Migrar repositorio. owner=Propietario @@ -723,7 +797,7 @@ mirror_prune_desc=Eliminar referencias de seguimiento de remotes obsoletas mirror_interval=Intervalo de réplica (Las unidades de tiempo válidas son 'h', 'm', 's'). Pone 0 para deshabilitar la sincronización automática. mirror_interval_invalid=El intervalo de réplica no es válido. mirror_address=Clonar desde URL -mirror_address_desc=Agregue las credenciales que sean necesarias en la sección de Autorización de Clonado. +mirror_address_desc=Ponga cualquier credencial requerida en la sección de Autorización. mirror_address_url_invalid=La url proporcionada no es válida. Debe escapar correctamente de todos los componentes de la url. mirror_address_protocol_invalid=La url proporcionada no es válida. Sólo las ubicaciones http(s):// o git:// pueden ser replicadas desde. mirror_lfs=Almacenamiento de archivos grande (LFS) @@ -731,6 +805,9 @@ mirror_lfs_desc=Activar la reproducción de datos LFS. mirror_lfs_endpoint=Punto final de LFS mirror_lfs_endpoint_desc=Sync intentará usar la url del clon para determinar el servidor LFS. También puede especificar un punto final personalizado si los datos LFS del repositorio se almacenan en otro lugar. mirror_last_synced=Sincronizado por última vez +mirror_password_placeholder=(Sin cambios) +mirror_password_blank_placeholder=(Indefinido) +mirror_password_help=Cambie el nombre de usuario para eliminar una contraseña almacenada. watchers=Seguidores stargazers=Fans forks=Forks @@ -747,6 +824,7 @@ delete_preexisting_label=Eliminar delete_preexisting=Eliminar archivos preexistentes delete_preexisting_content=Eliminar archivos en %s delete_preexisting_success=Eliminó archivos no adoptados en %s +blame_prior=Ver la culpa antes de este cambio transfer.accept=Aceptar transferencia transfer.accept_desc=Transferir a "%s" @@ -783,7 +861,7 @@ form.reach_limit_of_creation_n=Ya han alcanzado su límite de repositorios de %d form.name_reserved=El nombre de repositorio '%s' está reservado. form.name_pattern_not_allowed=El patrón '%s' no está permitido en un nombre de repositorio. -need_auth=Autorización de clonación +need_auth=Autorización migrate_options=Opciones de migración migrate_service=Servicio de Migración migrate_options_mirror_helper=Este repositorio será uno replicado @@ -817,11 +895,19 @@ migrated_from_fake=Migrado desde %[1]s migrate.migrate=Migrar desde %s migrate.migrating=Migrando desde %s... migrate.migrating_failed=La migración desde %s ha fallado. +migrate.migrating_failed.error=Error: %s migrate.github.description=Migrar datos de Github.com o Github Enterprise. migrate.git.description=Migrar o replicar de datos de git desde los servicios de Git migrate.gitlab.description=Migrar datos de GitLab.com o servidor gitlab autoalojado.
migrate.gitea.description=Migrando datos de Gitea.com o servidor Gitea autoalojado. migrate.gogs.description=Migrando datos de notabug.org u otro servidor de Gogs autoalojado. +migrate.migrating_git=Migrando datos de Git +migrate.migrating_topics=Migrando Temas +migrate.migrating_milestones=Migrando Hitos +migrate.migrating_labels=Migrando etiquetas +migrate.migrating_releases=Migrando Lanzamientos +migrate.migrating_issues=Migrando Incidencias +migrate.migrating_pulls=Migrando Pull Requests mirror_from=réplica de forked_from=forkeado de @@ -854,6 +940,7 @@ branch=Rama tree=Árbol clear_ref=`Borrar referencia actual` filter_branch_and_tag=Filtrar por rama o etiqueta +find_tag=Buscar etiqueta branches=Ramas tags=Etiquetas issues=Incidencias @@ -931,7 +1018,7 @@ editor.file_is_a_symlink='%s' es un enlace simbólico. Los enlaces simbólicos n editor.filename_is_a_directory=Nombre de archivo '%s' ya se utiliza como un nombre de directorio en este repositorio. editor.file_editing_no_longer_exists=El archivo que está editando, '%s', ya no existe en este repositorio. editor.file_deleting_no_longer_exists=El archivo que se está eliminando, '%s', ya no existe en este repositorio. -editor.file_changed_while_editing=Desde que comenzó a editar, el contenido del archivo ha sido cambiado. Clic aquí para ver qué ha cambiado o presione confirmar de nuevo para sobrescribir los cambios. +editor.file_changed_while_editing=Desde que comenzó a editar, el contenido del archivo ha sido cambiado. Haga clic aquí para ver qué ha cambiado o presione confirmar de nuevo para sobrescribir los cambios. editor.file_already_exists=Ya existe un archivo con nombre '%s' en este repositorio. editor.commit_empty_file_header=Commit un archivo vacío editor.commit_empty_file_text=El archivo que estás tratando de commit está vacío. ¿Proceder? @@ -1158,7 +1245,7 @@ issues.label_color=Color etiqueta issues.label_count=%d etiquetas issues.label_open_issues=%d incidencias abiertas issues.label_edit=Editar -issues.label_delete=Borrar +issues.label_delete=Eliminar issues.label_modify=Editar etiqueta issues.label_deletion=Eliminar etiqueta issues.label_deletion_desc=Eliminar una etiqueta la elimina de todos las incidencias. ¿Continuar? @@ -1284,6 +1371,9 @@ issues.review.resolved_by=ha marcado esta conversación como resuelta issues.assignee.error=No todos los asignados fueron añadidos debido a un error inesperado. issues.reference_issue.body=Cuerpo +compare.compare_base=base +compare.compare_head=comparar + pulls.desc=Activar Pull Requests y revisiones de código. pulls.new=Nuevo Pull Request pulls.compare_changes=Nuevo pull request @@ -1311,7 +1401,10 @@ pulls.manually_merged_as=El Pull Request se ha fusionado manualmente como Comience el título con %s para prevenir que el pull request se fusione accidentalmente.` -pulls.cannot_merge_work_in_progress=Este pull request está marcado como un trabajo en progreso. Elimine el prefijo %s del título cuando esté listo +pulls.cannot_merge_work_in_progress=Este pull request está marcado como un trabajo en curso. +pulls.still_in_progress=¿Aún en curso? +pulls.add_prefix=Añadir prefijo %s +pulls.remove_prefix=Eliminar prefijo %s pulls.data_broken=Este pull request está rota debido a que falta información del fork. pulls.files_conflicted=Este pull request tiene cambios en conflicto con la rama de destino. pulls.is_checking=La comprobación de conflicto de fusión está en progreso. Inténtalo de nuevo en unos momentos.
@@ -1537,6 +1630,15 @@ settings.hooks=Webhooks settings.githooks=Git Hooks settings.basic_settings=Configuración Básica settings.mirror_settings=Configuración de réplica +settings.mirror_settings.docs=Configure su proyecto para insertar y/o extraer automáticamente los cambios hacia/desde otro repositorio. Las ramas, etiquetas y commits se sincronizarán automáticamente. ¿Cómo replico los repositorios? +settings.mirror_settings.mirrored_repository=Repositorio Replicado +settings.mirror_settings.direction=Dirección +settings.mirror_settings.direction.pull=Pull +settings.mirror_settings.direction.push=Push +settings.mirror_settings.last_update=Última actualización +settings.mirror_settings.push_mirror.none=No hay Réplicas de Push configurados +settings.mirror_settings.push_mirror.remote_url=URL del repositorio remoto de Git +settings.mirror_settings.push_mirror.add=Añadir Réplica de Push settings.sync_mirror=Sincronizar ahora settings.mirror_sync_in_progress=La sincronización del repositorio replicado está en curso. Vuelva a intentarlo más tarde. settings.email_notifications.enable=Habilitar las notificaciones por correo electrónico @@ -1545,6 +1647,7 @@ settings.email_notifications.disable=Deshabilitar las notificaciones por correo settings.email_notifications.submit=Establecer Preferencia de correo electrónico settings.site=Sitio web settings.update_settings=Actualizar configuración +settings.branches.update_default_branch=Actualizar rama por defecto settings.advanced_settings=Ajustes avanzados settings.wiki_desc=Activar Wiki de repositorio settings.use_internal_wiki=Usar Wiki integrada @@ -1574,6 +1677,7 @@ settings.pulls.allow_rebase_merge_commit=Activar Rebase con commits explícitos settings.pulls.allow_squash_commits=Activar Squash en los commits fusionados settings.pulls.allow_manual_merge=Habilitar marcar PR como fusionado manualmente settings.pulls.enable_autodetect_manual_merge=Habilitar la autodetección de los commits fusionado manualmente (Nota: en algunos casos especiales, pueden producirse errores de apreciación) +settings.pulls.default_delete_branch_after_merge=Eliminar por defecto la rama de pull request después de fusionar settings.projects_desc=Activar Proyectos de Repositorio settings.admin_settings=Ajustes de administrador settings.admin_enable_health_check=Activar cheques de estado de salud del repositorio (git fsck) @@ -1601,6 +1705,7 @@ settings.transfer_form_title=Escriba el nombre del repositorio como confirmació settings.transfer_in_progress=Actualmente hay una transferencia en curso. Por favor, cancela si quieres transferir este repositorio a otro usuario. settings.transfer_notices_1=- Perderá el acceso al repositorio si lo transfiere a un usuario individual. settings.transfer_notices_2=- Mantendrá el acceso al repositorio si lo transfiere a una organización que usted (co-)posee. +settings.transfer_notices_3=- Si el repositorio es privado y se transfiere a un usuario individual, esta acción se asegura de que el usuario tenga al menos permisos de lectura (y cambie los permisos si es necesario). settings.transfer_owner=Nuevo Propietario settings.transfer_perform=Realizar transferencia settings.transfer_started=Este repositorio ha sido marcado para transferencia y espera confirmación de "%s" @@ -1724,7 +1829,7 @@ settings.event_pull_request_review_desc=Pull request aprobado, rechazado o comen settings.event_pull_request_sync=Pull Request sincronizado settings.event_pull_request_sync_desc=Pull request sincronizado. 
settings.branch_filter=Filtro de rama -settings.branch_filter_desc=Lista blanca de rama para eventos de push, creación de rama y eliminación de rama, especificados como patrón globo. Si está vacío o *, se reportan eventos para todas las ramas. Ver github.com/gobwas/glob documentación para la sintaxis. Ejemplos: master, {master,release*}. +settings.branch_filter_desc=Lista blanca de rama para eventos de push, creación de rama y eliminación de rama, especificados como patrón globo. Si está vacío o *, se reportan eventos para todas las ramas. Ver github.com/gobwas/glob documentación para la sintaxis. Ejemplos: master, {master,release*}. settings.active=Activo settings.active_helper=La información sobre los eventos desencadenados se enviará a esta URL de webhook. settings.add_hook_success=El webhook ha sido añadido. @@ -1794,7 +1899,7 @@ settings.dismiss_stale_approvals_desc=Cuando los nuevos commits que cambien el c settings.require_signed_commits=Requiere commits firmados settings.require_signed_commits_desc=Rechazar push en esta rama si los commits no están firmados o no son verificables. settings.protect_protected_file_patterns=Patrones de archivos protegidos (separados con punto y coma '\;'): -settings.protect_protected_file_patterns_desc=No se permite cambiar directamente archivos protegidos, incluso si el usuario tiene derechos para añadir, editar o eliminar archivos en esta rama. Se pueden separar múltiples patrones usando punto y coma ('\;'). Véase la documentación de github.com/gobwas/glob sobre sintaxis de patrones. Ejemplos: .drone.yml,/docs/**/*.txt. +settings.protect_protected_file_patterns_desc=Archivos protegidos que no están permitidos a ser cambiados directamente incluso si el usuario tiene permiso para agregar, editar o borrar archivos en esta rama. Múltiples patrones pueden separarse usando punto y coma ('\;'). Vea la documentación de github.com/gobwas/glob para la sintaxis de patrones. Ejemplos: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Activar protección settings.delete_protected_branch=Desactivar protección settings.update_protect_branch_success=La protección de la rama '%s' ha sido actualizada. @@ -1813,6 +1918,16 @@ settings.choose_branch=Elija una rama… settings.no_protected_branch=No hay ramas protegidas. settings.edit_protected_branch=Editar settings.protected_branch_required_approvals_min=Las aprobaciones necesarias no pueden ser negativas. +settings.tags=Etiquetas +settings.tags.protection=Protección de etiquetas +settings.tags.protection.pattern=Patrón de etiquetas +settings.tags.protection.allowed=Permitido +settings.tags.protection.allowed.users=Usuarios permitidos +settings.tags.protection.allowed.teams=Equipos permitidos +settings.tags.protection.allowed.noone=Ningún +settings.tags.protection.create=Proteger Etiqueta +settings.tags.protection.none=No hay etiquetas protegidas. +settings.tags.protection.pattern.description=Puede usar un solo nombre o un patrón de glob o expresión regular para que coincida con varias etiquetas. Lea más en la guía de etiquetas protegida. settings.bot_token=Token del Bot settings.chat_id=ID Chat settings.matrix.homeserver_url=URL de Homeserver @@ -1826,6 +1941,7 @@ settings.archive.success=El repositorio ha sido archivado exitosamente. settings.archive.error=Ha ocurrido un error al intentar archivar el repositorio. Vea el registro para más detalles. settings.archive.error_ismirror=No puede archivar un repositorio replicado. 
settings.archive.branchsettings_unavailable=Los ajustes de rama no están disponibles si el repositorio está archivado. +settings.archive.tagsettings_unavailable=Los ajustes de las etiquetas no están disponibles si el repositorio está archivado. settings.unarchive.button=Desarchivar Repositorio settings.unarchive.header=Desarchivar este Repositorio settings.unarchive.text=Des-archivar el repositorio restaurará su capacidad de recibir commits y pushes, así como nuevas incidencias y pull-requests. @@ -1885,6 +2001,7 @@ diff.file_image_width=Anchura diff.file_image_height=Altura diff.file_byte_size=Tamaño diff.file_suppressed=La diferencia del archivo ha sido suprimido porque es demasiado grande +diff.file_suppressed_line_too_long=Las diferencias del archivo han sido suprimidas porque una o más líneas son muy largas diff.too_many_files=Algunos archivos no se mostraron porque demasiados archivos cambiaron en este cambio diff.comment.placeholder=Deja un comentario diff.comment.markdown_info=Es posible estilizar con markdown. @@ -1912,6 +2029,7 @@ release.new_release=Nueva Release release.draft=Borrador release.prerelease=Pre-lanzamiento release.stable=Estable +release.compare=Comparar release.edit=editar release.ahead.commits=%d commits release.ahead.target=a %s desde esta versión @@ -1938,6 +2056,7 @@ release.deletion_tag_desc=Eliminará esta etiqueta del repositorio. El contenido release.deletion_tag_success=La etiqueta ha sido eliminada. release.tag_name_already_exist=Ya existe uno lanzamiento con esta etiqueta. release.tag_name_invalid=El nombre de la etiqueta no es válido. +release.tag_name_protected=El nombre de la etiqueta está protegido. release.tag_already_exist=Este nombre de etiqueta ya existe. release.downloads=Descargas release.download_count=Descargas: %s @@ -1969,6 +2088,10 @@ branch.restore=Restaurar rama '%s' branch.download=Descargar rama '%s' branch.included_desc=Esta rama forma parte de la predeterminada branch.included=Incluida +branch.create_new_branch=Crear rama desde la rama: +branch.confirm_create_branch=Crear rama +branch.new_branch=Crear nueva rama +branch.new_branch_from=Crear nueva rama desde '%s' tag.create_tag=Crear etiqueta %s tag.create_success=La etiqueta '%s' ha sido creada. @@ -2129,7 +2252,7 @@ dashboard.cron.error=Error en Cron: %s: %[3]s dashboard.cron.finished=Cron: %[1]s ha finalizado dashboard.delete_inactive_accounts=Eliminar todas las cuentas inactivas dashboard.delete_inactive_accounts.started=Se ha iniciado la tarea: "Eliminar todas las cuentas inactivas". -dashboard.delete_repo_archives=Borrar todos los archivos del repositorio (ZIP, TAR.GZ, etc.) +dashboard.delete_repo_archives=Eliminar todos los archivos del repositorio (ZIP, TAR.GZ, etc.) dashboard.delete_repo_archives.started=Se ha iniciado la tarea: "Eliminar todos los archivos del repositorios". dashboard.delete_missing_repos=Eliminar todos los repositorios que faltan sus archivos Git dashboard.delete_missing_repos.started=Se ha iniciado la tarea: "Eliminar todos los repositorios que faltan sus archivos Git". @@ -2178,6 +2301,8 @@ dashboard.total_gc_time=Pausa Total por GC dashboard.total_gc_pause=Pausa Total por GC dashboard.last_gc_pause=Última Pausa por GC dashboard.gc_times=Ejecuciones GC +dashboard.delete_old_actions=Eliminar todas las acciones antiguas de la base de datos +dashboard.delete_old_actions.started=Se ha iniciado la tarea: "Eliminar todas las acciones antiguas de la base de datos".
users.user_manage_panel=Gestión de cuentas de usuario users.new_account=Crear Cuenta de Usuario @@ -2273,7 +2398,6 @@ auths.host=Servidor auths.port=Puerto auths.bind_dn=Bind DN auths.bind_password=Contraseña Bind -auths.bind_password_helper=Advertencia: Esta contraseña se almacena en texto plano. Utilice una cuenta de sólo lectura si es posible. auths.user_base=Base de búsqueda de usuarios auths.user_dn=DN de Usuario auths.attribute_username=Atributo nombre de usuario @@ -2304,6 +2428,7 @@ auths.allowed_domains_helper=Dejar vacío para permitir todos los dominios. Sepa auths.enable_tls=Habilitar cifrado TLS auths.skip_tls_verify=Omitir la verificación TLS auths.pam_service_name=Nombre del Servicio PAM +auths.pam_email_domain=Dominio de correo de PAM (opcional) auths.oauth2_provider=Proveedor OAuth2 auths.oauth2_icon_url=URL de icono auths.oauth2_clientID=ID de cliente (clave) @@ -2403,6 +2528,7 @@ config.db_path=Ruta config.service_config=Configuración del servicio config.register_email_confirm=Requerir confirmación de correo electrónico para registrarse config.disable_register=Deshabilitar auto-registro +config.allow_only_internal_registration=Permitir el registro solo desde Gitea config.allow_only_external_registration=Permitir el registro únicamente a través de servicios externos config.enable_openid_signup=Habilitar el auto-registro con OpenID config.enable_openid_signin=Habilitar el inicio de sesión con OpenID diff --git a/options/locale/locale_fa-IR.ini b/options/locale/locale_fa-IR.ini index c5c36ea86c9d..cf36d01996b9 100644 --- a/options/locale/locale_fa-IR.ini +++ b/options/locale/locale_fa-IR.ini @@ -87,6 +87,7 @@ loading=بارگذاری… error404=صفحه موردنظر شما یا وجود ندارد یا شما دسترسی کافی برای مشاهده آن را ندارید. + [error] occurred=خطایی رخ داده است report_message=اگر شما مطمئن هستیند این مشکل مربوط به یک باگ در Gitea است، لطفا در GitHub مشکل را جستجو کنید و در صورت نیاز، یک موضوع جدید باز کنید. @@ -286,7 +287,6 @@ openid_connect_desc=نشانی OpenID URI وارد شده شناخته نشد. openid_register_title=ایجاد یک حساب جدید openid_register_desc=نشانی URI وارد شده شناخته نشد. آن را با یک حساب جدید متصل کنید. openid_signin_desc=نوع حساب کاربری خود را وارد کنید. به عنوان مثال: https://anne.me و bob.openid.org.cn یا gnusocial.net/carry. -disable_forgot_password_mail=بازیابی حساب غیر فعال شده است. لطفا با مدیر سایت تماس بگیرید. email_domain_blacklisted=شما نمیتوانید با ایمیل خود ثبت نام کنید. authorize_application=برنامه احراز هویت authorize_redirect_notice=اگر شما این برنامه را تایید کنید، به %s منتقل خواهید شد. @@ -298,11 +298,18 @@ authorization_failed_desc=تاییدیه ناموفق بود. لذا ما درخ sspi_auth_failed=SSPI عدم احراز هویت [mail] + activate_account=لطفا حساب خود را فعال کنید + activate_email=نشانی ایمیل خود را تایید کنید + +register_notify=به گیتی یا گیت‌گو خوش آمدید + reset_password=حساب خود را دوباره فعال کنید + register_success=ثبت‌نام با موفقیت انجام شد -register_notify=به گیتی یا گیت‌گو خوش آمدید + + @@ -504,7 +511,6 @@ add_new_key=اضافه کردن کلید SSH add_new_gpg_key=اضافه کردن کلید GPG ssh_key_been_used=این کلید SSH پیش از این به سرور افزوده شده است. gpg_key_id_used=یک کلید GPG با این ID پیش از این وجود داشته است. -gpg_no_key_email_found=این کلید GPG با هیچ ایمیلی که به حساب شما مرتبط است، قابل استفاده نیست. 
subkeys=کلید های زیر مجموعه key_id=شناسه کلید key_name=نام کلید @@ -627,6 +633,7 @@ email_notifications.onmention=فقط یادآوری توسط ایمیل email_notifications.disable=غیرفعال‌ کردن اعلان‌های ایمیل email_notifications.submit=ثبت اولویت ایمیل + [repo] owner=مالک repo_name=نام مخزن @@ -665,7 +672,6 @@ mirror_prune_desc=حذف منابع پیگیری‌راه‌دور منسوخ mirror_interval=بازه زمانی قرینه سازی (mirror) با 'h', 'm', 's'. برای غیر فعال کردن همگام سازی خودکار 0 بگذارید. mirror_interval_invalid=بازه زمانی سازی قرینه نیست. mirror_address=همسان‌سازی از نشانی -mirror_address_desc=هر گواهینامه لازم را در بخش Clone Authority (مجوز همسان‌سازی) قرار دهید. mirror_address_url_invalid=Url ارائه شده نامعتبر است. شما باید از تمام اجزای Url صحیح گزیر بزنید. mirror_address_protocol_invalid=نشانی ارائه شده غیرمعتبر است. فقط استفاده از http(s):// یا git:// می‌تواند قرینه شوند. mirror_last_synced=آخرین همگام سازی @@ -704,7 +710,6 @@ archive.pull.nocomment=این مخزن بایگانی شده. شما نمی تو form.name_reserved=یک مخزن با نام '%s' از قبل وجود دارد. form.name_pattern_not_allowed=الگوی %s در نام مخزن مجاز نیست. -need_auth=مجوز همسان‌سازی migrate_items=مولفه های مهاجرت migrate_items_wiki=دانشنامه migrate_items_milestones=نقاط عطف @@ -1106,6 +1111,7 @@ issues.review.un_resolve_conversation=مکالمه را بعنوان حل نشد issues.review.resolved_by=علامت گذاری این مکالمه بعنوان حل شده issues.assignee.error=به دلیل خطای غیرمنتظره همه تکالیف اضافه نشد. + pulls.desc=نمایش تقاضای واکشی ها و بازبینی های کد. pulls.new=ایجاد تقاضای واکشی pulls.compare_changes=تقاضای واکشی جدید @@ -1130,7 +1136,6 @@ pulls.merged_as=تقاضای واکشی شروع شذع با عنوان %s برای جلو گیری کردن از تقاضای واکشی که موقع ادغام دچار تصادم میشود.` -pulls.cannot_merge_work_in_progress=این تقاضای واکشی در حال پردازش است. پیشوند %s پس از آنکه آماده شد از عنوانش حذف میشود pulls.data_broken=این تقاضای واکشی به دلیل از دست رفتن اطلاعات انشعاب با شکست مواجه شد. pulls.files_conflicted=این تقاضای واکشی دارای تغییراتی است که با شاخه هدف تداخل دارد. pulls.is_checking=در حال پردازش تداخل در ادغام می‌باشد. لطفاً لحظاتی بعد امتحان کنید. @@ -1454,7 +1459,6 @@ settings.event_issue_comment=دیدگاه های مسئله settings.event_issue_comment_desc=نظر در مسئله ایجاد شد، ویرایش شد یا حذف شد. settings.event_pull_request=تقاضای واکشی settings.branch_filter=صافی شاخه -settings.branch_filter_desc=لیست سفید برای درج در شاخه، سازنده شاخه و حذف کننده رخداد ها، به عنوان الگوی قطره‌ای تعریف میشوند. اگر خالی یا * باشد. رخداد های تمامی شاخه های گزارش می شوند. به github.com/gobwas/glob برای مستندات املای آن نگاه کنید. مثال ها: master, {master,release*}. settings.active=فعال settings.active_helper=اطلاعات درباره کشیده شدن ماشه رویدادها به این نشانی هوک تحت وب ارسال خواهد شد. settings.add_hook_success=یک هوک تحت وب جدید افزوده شده است. @@ -1895,7 +1899,6 @@ auths.host=میزبان auths.port=درگاه (پورت) auths.bind_dn=DN متصل شده auths.bind_password=اتصال گذرواژه -auths.bind_password_helper=هشدار: این گذرواژه به صورت متن خام ذخیره می شود. استفاده حساب های کاربری فقط-خواندنی امکان پذیر هست. auths.user_base=پایگاه جستجوی کاربر auths.user_dn=کاربر DN auths.attribute_username=ویژگی نام کاربری diff --git a/options/locale/locale_fi-FI.ini b/options/locale/locale_fi-FI.ini index c1e555e9bb19..0101e6ea8ae2 100644 --- a/options/locale/locale_fi-FI.ini +++ b/options/locale/locale_fi-FI.ini @@ -20,6 +20,7 @@ user_profile_and_more=Profiili ja asetukset… signed_in_as=Kirjautuneena käyttäjänä enable_javascript=Tämä sivusto toimii paremmin JavaScriptillä. 
toc=Sisällysluettelo +licenses=Lisenssit username=Käyttäjätunnus email=Sähköpostiosoite @@ -69,6 +70,7 @@ issues=Ongelmat milestones=Merkkipaalut cancel=Peruuta +save=Tallenna add=Lisää add_all=Lisää kaikki remove=Poista @@ -81,6 +83,7 @@ loading=Ladataan… error404=Sivu, jota yrität nähdä, joko ei löydy tai et ole oikeutettu katsomaan sitä. + [error] occurred=Tapahtui virhe @@ -189,6 +192,7 @@ view_home=Näytä %s search_repos=Etsi repo… +show_private=Yksityinen issues.in_your_repos=Repoissasi @@ -250,7 +254,6 @@ openid_connect_desc=Valittu OpenID-osoite on tuntematon. Liitä se uuteen tiliin openid_register_title=Luo uusi tili openid_register_desc=Valittu OpenID-osoite on tuntematon. Liitä se uuteen tiliin täällä. openid_signin_desc=Anna OpenID-osoitteesi. Esimerkiksi: https://anne.me, bob.openid.org.cn tai gnusocial.net/carry. -disable_forgot_password_mail=Tilin palautus ei ole käytössä. Ota yhteyttä sivuston ylläpitäjään. email_domain_blacklisted=Et voi rekisteröityä sähköpostiosoittellasi. authorize_application=Valtuuta sovellus authorize_redirect_notice=Sinut uudelleen ohjataan osoitteeseen %s jos valtuutat tämän sovelluksen. @@ -262,11 +265,18 @@ authorization_failed_desc=Käyttöoikeuden varmistus epäonnistui virheellisen p sspi_auth_failed=SSPI todennus epäonnistui [mail] + activate_account=Ole hyvä ja aktivoi tilisi + activate_email=Vahvista sähköpostiosoitteesi + +register_notify=Tervetuloa Giteaan + reset_password=Palauta käyttäjätili + register_success=Rekisteröinti onnistui -register_notify=Tervetuloa Giteaan + + @@ -488,6 +498,7 @@ delete_account_title=Poista käyttäjätili email_notifications.enable=Ota käyttöön sähköpostiilmoitukset + [repo] owner=Omistaja repo_name=Repon nimi @@ -523,7 +534,6 @@ template.issue_labels=Ongelmien tunnisteet -need_auth=Kloonauksen valtuutus migrate_items=Siirrettävät asiat migrate_items_wiki=Wiki migrate_items_milestones=Merkkipaalut @@ -710,6 +720,8 @@ issues.label_modify=Muokkaa tunniste issues.label_deletion=Poista tunniste issues.label.filter_sort.alphabetically=Aakkosjärjestyksessä issues.label.filter_sort.reverse_alphabetically=Käänteisessä aakkosjärjestyksessä +issues.label.filter_sort.by_size=Pienin koko +issues.label.filter_sort.reverse_by_size=Suurin koko issues.num_participants=%d osallistujaa issues.subscribe=Tilaa issues.unsubscribe=Lopeta tilaus @@ -750,6 +762,7 @@ issues.dependency.remove_info=Poistä tämä riippuvuus issues.review.self.approval=Et voi hyväksyä omia vetopyyntöjä. issues.review.approve=hyväksyi nämä muutokset %s + pulls.new=Uusi pull pyyntö pulls.filter_branch=Suodata branch pulls.no_results=Tuloksia ei löytynyt. @@ -823,6 +836,8 @@ activity.title.issues_created_by=%s luonnut %s activity.closed_issue_label=Suljettu activity.new_issues_count_1=Uusi ongelma activity.new_issues_count_n=Uutta ongelmaa +activity.new_issue_label=Avoinna +activity.unresolved_conv_label=Auki activity.published_release_label=Julkaistu activity.git_stats_file_1=%d tiedosto activity.git_stats_file_n=%d tiedostoa @@ -871,6 +886,8 @@ settings.delete_notices_1=- Tätä toimintoa EI VOI peruuttaa m settings.update_settings_success=Repon asetukset on päivitetty. 
settings.delete_collaborator=Poista settings.search_user_placeholder=Etsi käyttäjä… +settings.teams=Tiimit +settings.add_team=Lisää tiimi settings.add_webhook=Lisää webkoukku settings.webhook.test_delivery=Testitoimitus settings.webhook.request=Pyyntö @@ -882,6 +899,7 @@ settings.githook_edit_desc=Jos koukku ei ole käytössä, esitellään esimerkki settings.githook_name=Koukun nimi settings.githook_content=Koukun sisältö settings.update_githook=Päivitys koukku +settings.http_method=HTTP-menetelmä settings.secret=Salaus settings.slack_username=Käyttäjätunnus settings.slack_icon_url=Kuvakkeen URL @@ -948,6 +966,7 @@ diff.file_image_width=Leveys diff.file_image_height=Korkeus diff.file_byte_size=Koko diff.comment.add_review_comment=Lisää kommentti +diff.comment.reply=Vastaa diff.review.comment=Kommentoi diff.review.approve=Hyväksy @@ -1060,6 +1079,7 @@ teams.admin_permission_desc=Tämä tiimi myöntää jäsenille Ylläpito teams.repositories=Tiimin repot teams.add_nonexistent_repo=Repo jota yrität lisätä ei ole vielä olemassa, ole hyvä ja luo se ensin. teams.members.none=Ei jäseniä tässä tiimissä. +teams.all_repositories=Kaikki repot [admin] dashboard=Kojelauta @@ -1111,6 +1131,7 @@ dashboard.gc_times=GC aikoja users.user_manage_panel=Tilien hallinta users.new_account=Luo käyttäjätili users.name=Käyttäjätunnus +users.full_name=Kokonimi users.activated=Aktivoitu users.admin=Ylläpito users.repos=Repot @@ -1276,6 +1297,7 @@ monitor.desc=Kuvaus monitor.start=Alkamisaika monitor.execute_time=Suoritusaika monitor.queues=Jonot +monitor.queue=Jono: %s monitor.queue.name=Nimi monitor.queue.type=Tyyppi diff --git a/options/locale/locale_fr-FR.ini b/options/locale/locale_fr-FR.ini index 0ca14e0e9ae0..b5a64da26831 100644 --- a/options/locale/locale_fr-FR.ini +++ b/options/locale/locale_fr-FR.ini @@ -83,6 +83,7 @@ add=Ajouter add_all=Tout Ajouter remove=Retirer remove_all=Tout Retirer +edit=Éditer write=Écrire preview=Aperçu @@ -91,11 +92,16 @@ loading=Chargement… step1=Étape 1: step2=Étape 2: +error=Erreur error404=La page que vous essayez d'atteindre n'existe pas ou vous n'êtes pas autorisé à la voir. +never=Jamais + [error] occurred=Une erreur est survenue report_message=Si vous êtes sûr qu'il s'agit d'un bug de Gitea, cherchez s’il existe un ticket sur GitHub et ouvrez-en un nouveau si nécessaire. +missing_csrf=Requête incorrecte: aucun jeton CSRF présent +invalid_csrf=Requête incorrecte: jeton CSRF invalide [startpage] app_desc=Un service Git auto-hébergé sans prise de tête @@ -299,7 +305,8 @@ openid_connect_desc=L'URI OpenID choisie est inconnue. Associez-le à un nouveau openid_register_title=Créer un nouveau compte openid_register_desc=L'URI OpenID choisie est inconnue. Associez-le à un nouveau compte ici. openid_signin_desc=Veuillez entrer votre URI OpenID. Par exemple: https://anne.me, bob.openid.org.cn ou gnusocial.net/charles. -disable_forgot_password_mail=La récupération de compte est désactivée. Veuillez contacter l'administrateur du site. +disable_forgot_password_mail=La récupération du compte est désactivée car aucune adresse courriel n'est configurée. Veuillez contacter l'administrateur de votre site. +disable_forgot_password_mail_admin=La récupération du compte est disponible uniquement lorsque l'adresse courriel est configurée. Veuillez configurer l'adresse courriel pour activer la récupération du compte. email_domain_blacklisted=Vous ne pouvez pas vous enregistrer avec votre adresse e-mail. 
authorize_application=Autoriser l'application authorize_redirect_notice=Vous serez redirigé vers %s si vous autorisez cette application. @@ -313,19 +320,40 @@ password_pwned=Le mot de passe que vous avez choisi est sur une Migrer le dépôt. owner=Propriétaire @@ -721,7 +749,7 @@ mirror_prune_desc=Supprimer les références externes obsolètes mirror_interval=Intervalle de synchronisation ('h', 'm', et 's' sont des unités valides), 0 pour désactiver. mirror_interval_invalid=L'intervalle de synchronisation est invalide. mirror_address=Cloner depuis une URL -mirror_address_desc=Mettez les identifiants requis dans la section Autorisation de Clonage. +mirror_address_desc=Insérez tous les identifiants requis dans la section Autorisation. mirror_address_url_invalid=L'url fournie est invalide. Vous devez échapper tous les composants de l'url correctement. mirror_address_protocol_invalid=L'url fournie est invalide. Seuls les protocoles http(s):// ou git:// peuvent être la source du miroir. mirror_lfs=Stockage de fichiers volumineux (LFS) @@ -729,6 +757,9 @@ mirror_lfs_desc=Activer la mise en miroir des données LFS. mirror_lfs_endpoint=Point d'accès LFS mirror_lfs_endpoint_desc=La synchronisation tentera d'utiliser l'url de clonage pour déterminer le serveur LFS. Vous pouvez également spécifier un point d'accès personnalisé si les données LFS du dépôt sont stockées ailleurs. mirror_last_synced=Dernière synchronisation +mirror_password_placeholder=(Aucune modification) +mirror_password_blank_placeholder=(Non défini) +mirror_password_help=Modifiez le nom d'utilisateur pour effacer un mot de passe enregistré. watchers=Observateurs stargazers=Fans forks=Bifurcations @@ -781,7 +812,7 @@ form.reach_limit_of_creation_n=Vous avez déjà atteint la limite de %d dépôts form.name_reserved=Le dépôt "%s" a un nom réservé. form.name_pattern_not_allowed="%s" n'est pas autorisé dans un nom de dépôt. -need_auth=Autorisations de clonage +need_auth=Autorisation migrate_options=Options de migration migrate_service=Service de migration migrate_options_mirror_helper=Ce dépôt sera un miroir @@ -815,11 +846,19 @@ migrated_from_fake=Migré de %[1]s migrate.migrate=Migrer depuis %s migrate.migrating=Migration de %s ... migrate.migrating_failed=La migration de %s a échoué. +migrate.migrating_failed.error=Erreur: %s migrate.github.description=Migration de données depuis Github.com ou Github Enterprise. migrate.git.description=Migration ou Miroir des données git depuis des services Git migrate.gitlab.description=Migration des données depuis GitLab.com ou d'un serveur gitlab hébergé. migrate.gitea.description=Migration des données depuis Gitea.com ou un serveur Gitea hébergé soi-même. migrate.gogs.description=Migration de données depuis notabug.org ou un autre serveur Gogs auto-hébergé. 
+migrate.migrating_git=Migration des données Git +migrate.migrating_topics=Migration des sujets +migrate.migrating_milestones=Migration des jalons +migrate.migrating_labels=Migration des étiquettes +migrate.migrating_releases=Migration des versions +migrate.migrating_issues=Migration des tickets +migrate.migrating_pulls=Migration des demandes d'ajout mirror_from=miroir de forked_from=bifurqué depuis @@ -852,6 +891,7 @@ branch=Branche tree=Aborescence clear_ref=`Effacer la référence actuelle` filter_branch_and_tag=Filtrer une branche ou un tag +find_tag=Rechercher un tag branches=Branches tags=Tags issues=Tickets @@ -1264,8 +1304,8 @@ issues.review.dismissed_label=Rejeté issues.review.left_comment=laisser un commentaire issues.review.content.empty=Vous devez laisser un commentaire indiquant le(s) changement(s) demandé(s). issues.review.reject=a requis les changements %s -issues.review.wait=a demandé une révision pour %s -issues.review.add_review_request=demande de révision de %s %s +issues.review.wait=a été sollicité pour une révision %s +issues.review.add_review_request=a demandé une révision de %s %s issues.review.remove_review_request=a supprimé la demande de révision pour %s %s issues.review.remove_review_request_self=a refusé la revue %s issues.review.pending=En attente @@ -1282,6 +1322,9 @@ issues.review.resolved_by=marquer cette conversation comme résolue issues.assignee.error=Tous les assignés n'ont pas été ajoutés en raison d'une erreur inattendue. issues.reference_issue.body=Corps +compare.compare_base=base +compare.compare_head=comparer + pulls.desc=Activer les demandes de fusion et la revue de code. pulls.new=Nouvelle demande d'ajout pulls.compare_changes=Nouvelle demande de fusion @@ -1309,7 +1352,7 @@ pulls.manually_merged_as=La demande d'ajout a été fusionnée manuellement en t pulls.is_closed=La demande de fusion a été fermée. pulls.has_merged=La pull request a été fusionnée. pulls.title_wip_desc=`Préfixer le titre par %s pour empêcher cette demande d'ajout d'être fusionnée par erreur.` -pulls.cannot_merge_work_in_progress=Cette demande d'ajout est marquée comme en cours de chantier. Retirer le préfixe %s du titre quand elle sera prête +pulls.remove_prefix=Enlever le préfixe %s pulls.data_broken=Cette demande de fusion est impossible par manque d'informations de bifurcation. pulls.files_conflicted=Cette demande d'ajout contient des modifications en conflit avec la branche ciblée. pulls.is_checking=Vérification des conflits de fusion en cours. Réessayez dans quelques instants. @@ -1535,6 +1578,9 @@ settings.hooks=Déclencheurs Web settings.githooks=Déclencheurs Git settings.basic_settings=Paramètres de base settings.mirror_settings=Réglages Miroir +settings.mirror_settings.direction=Direction +settings.mirror_settings.last_update=Dernière mise à jour +settings.mirror_settings.push_mirror.remote_url=URL du dépôt distant Git settings.sync_mirror=Synchroniser maintenant settings.mirror_sync_in_progress=La synchronisation est en cours. Revenez dans une minute. 
settings.email_notifications.enable=Activer les notifications par e-mail @@ -1543,6 +1589,7 @@ settings.email_notifications.disable=Désactiver les notifications par e-mail settings.email_notifications.submit=Définir la préférence e-mail settings.site=Site Web settings.update_settings=Valider +settings.branches.update_default_branch=Changer la Branche par Défaut settings.advanced_settings=Paramètres avancés settings.wiki_desc=Activer le wiki du dépôt settings.use_internal_wiki=Utiliser le wiki interne @@ -1722,7 +1769,6 @@ settings.event_pull_request_review_desc=Demande d'ajout approvée, rejetée ou c settings.event_pull_request_sync=Demande d'ajout synchronisée settings.event_pull_request_sync_desc=Demande d'ajout synchronisée. settings.branch_filter=Filtre de branche -settings.branch_filter_desc=Liste blanche pour les évènements de poussage, de création et de suppression de branche, spécifiés par un motif de développement. Si ce champ est vide ou vaut *, ces évènements sont rapportés pour toutes les branches . Voir la documentation pour la syntaxe sur github.com/gobwas/glob. Exemples : master, {master,release*}. settings.active=Actif settings.active_helper=Les informations sur les événements déclenchés seront envoyées à cette url de Webhook. settings.add_hook_success=Nouveau Webhook ajouté. @@ -1792,7 +1838,6 @@ settings.dismiss_stale_approvals_desc=Quand de nouvelles révisions qui changent settings.require_signed_commits=Exiger des révisions signées settings.require_signed_commits_desc=Rejeter les pushs vers cette branche s’ils ne sont pas signés ou vérifiables. settings.protect_protected_file_patterns=Motifs des fichiers protégés (séparés par un point-virgule '\;') : -settings.protect_protected_file_patterns_desc=Fichiers protégés dont la modification directe est interdite même si l’utilisateur a les droits d’ajouter, éditer ou supprimer des fichiers dans cette branche. Plusieurs motifs peuvent être séparés par un point-virgule ('\;'). Consultez la documentation de github.com/gobwas/glob sur la syntaxe des motifs. Exemples: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Activer la protection settings.delete_protected_branch=Désactiver la protection settings.update_protect_branch_success=La protection de branche à été mise à jour pour la branche "%s". @@ -1905,6 +1950,7 @@ diff.image.overlay=Superposition releases.desc=Suivi des versions et des téléchargements. release.releases=Versions release.detail=Détails de la version +release.tags=Tags release.new_release=Nouvelle version release.draft=Brouillon release.prerelease=Pré-publication @@ -1929,8 +1975,10 @@ release.delete_release=Supprimer cette version release.delete_tag=Supprimer le tag release.deletion=Supprimer cette version release.deletion_success=Cette livraison a été supprimée. +release.deletion_tag_success=L'étiquette a été supprimée. release.tag_name_already_exist=Une version avec ce nom de balise existe déjà. release.tag_name_invalid=Le nom de balise est invalide. +release.tag_already_exist=Ce nom d'étiquette existe déjà. release.downloads=Téléchargements release.download_count=Télécharger: %s @@ -2097,8 +2145,12 @@ dashboard.operation_switch=Basculer dashboard.operation_run=Exécuter dashboard.clean_unbind_oauth=Effacer les connexions OAuth associées dashboard.clean_unbind_oauth_success=Toutes les connexions OAuth associées ont été supprimées. +dashboard.cron.finished=Tâche planifiée : %[1]s a terminé +dashboard.delete_repo_archives=Supprimer toutes les archives des dépôts (ZIP, TAR.GZ, etc..) 
+dashboard.delete_repo_archives.started=Tâche de suppression de toutes les archives de dépôts démarrée. dashboard.delete_missing_repos=Supprimer tous les dépôts dont les fichiers Git sont manquants dashboard.delete_generated_repository_avatars=Supprimer les avatars de dépôt générés +dashboard.repo_health_check=Vérifier l'état de santé de tous les dépôts dashboard.check_repo_stats=Voir les statistiques de tous les dépôts dashboard.archive_cleanup=Supprimer les archives des vieux dépôts dashboard.git_gc_repos=Collecter les déchets des dépôts @@ -2135,6 +2187,7 @@ dashboard.total_gc_time=Pause GC dashboard.total_gc_pause=Pause GC dashboard.last_gc_pause=Dernière Pause GC dashboard.gc_times=Nombres de GC +dashboard.delete_old_actions=Supprimer toutes les anciennes actions de la base de données users.user_manage_panel=Gestion du compte utilisateur users.new_account=Créer un compte @@ -2171,6 +2224,7 @@ users.delete_account=Supprimer cet utilisateur users.still_own_repo=Cet utilisateur possède un ou plusieurs dépôts. Veuillez les supprimer ou les transférer à un autre utilisateur. users.still_has_org=Cet utilisateur est membre d'une organisation. Veuillez le retirer de toutes les organisations dont il est membre au préalable. users.deletion_success=Le compte a été supprimé. +users.reset_2fa=Réinitialiser l'authentification à deux facteurs emails.email_manage_panel=Gestion des courriels des utilisateurs emails.primary=Principale @@ -2192,6 +2246,7 @@ orgs.members=Membres orgs.new_orga=Nouvelle organisation repos.repo_manage_panel=Gestion des dépôts +repos.unadopted=Dépôts non adoptés repos.owner=Propriétaire repos.name=Nom repos.private=Privé @@ -2203,6 +2258,7 @@ repos.size=Taille systemhooks=Rappels système +systemhooks.desc=Les Webhooks font automatiquement des requêtes HTTP POST à un serveur lorsque certains événements Gitea se déclenchent. Les Webhooks définis ici agiront sur tous les dépots du système, donc veuillez prendre en compte les implications en termes de performances que cela peut avoir. Lire la suite dans le guide des Webhooks. systemhooks.add_webhook=Ajouter un rappel système systemhooks.update_webhook=Mettre à jour un rappel système @@ -2221,7 +2277,6 @@ auths.host=Hôte auths.port=Port auths.bind_dn=Bind DN auths.bind_password=Bind mot de passe -auths.bind_password_helper=Attention: ce mot de passe est stocké en clair. Veuillez utiliser, si possible, un compte avec des droits limités en lecture seule. auths.user_base=Utilisateur Search Base auths.user_dn=Utilisateur DN auths.attribute_username=Attribut nom d'utilisateur @@ -2378,6 +2433,7 @@ config.mailer_use_sendmail=Utiliser Sendmail config.mailer_sendmail_path=Chemin d’accès à Sendmail config.mailer_sendmail_args=Arguments supplémentaires pour Sendmail config.mailer_sendmail_timeout=Délai d’attente de Sendmail +config.test_email_placeholder=E-mail (ex: test@example.com) config.send_test_mail=Envoyer un e-mail de test config.test_mail_failed=Impossible d'envoyer un e-mail de test à '%s' : %v config.test_mail_sent=Un e-mail de test à été envoyé à '%s'. @@ -2505,6 +2561,7 @@ notices.delete_selected=Supprimé les éléments sélectionnés notices.delete_all=Supprimer toutes les notifications notices.type=Type notices.type_1=Dépôt +notices.type_2=Tâche notices.desc=Description notices.op=Opération notices.delete_success=Les informations systèmes ont été supprimées. 
diff --git a/options/locale/locale_hu-HU.ini b/options/locale/locale_hu-HU.ini index 0b2d224e9479..eb672bcf0f1c 100644 --- a/options/locale/locale_hu-HU.ini +++ b/options/locale/locale_hu-HU.ini @@ -19,6 +19,9 @@ create_new=Létrehozás… user_profile_and_more=Profil és beállítások... signed_in_as=Bejelentkezve mint enable_javascript=Ez az oldal jobban működik JavaScript-tel. +toc=Tartalomjegyzék +licenses=Licencek +return_to_gitea=Vissza a Gitea-hoz username=Felhasználónév email=E-mail cím @@ -50,6 +53,8 @@ new_migrate=Új migráció new_mirror=Új tükör new_fork=Új másolat new_org=Új szervezet +new_project=Új projekt +new_project_board=Új projekt tábla manage_org=Szervezetek kezelése admin_panel=Rendszergazdai felület account_settings=Fiók beállítások @@ -70,6 +75,7 @@ issues=Hibajegyek milestones=Mérföldkövek cancel=Mégse +save=Mentés add=Hozzáadás add_all=Összes hozzáadása remove=Eltávolítás @@ -79,9 +85,12 @@ write=Írás preview=Előnézet loading=Betöltés… +step1=1. lépés: +step2=2. lépés: error404=Az elérni kívánt oldal vagy nem létezik, vagy nincs jogosultsága a megtekintéséhez. + [error] occurred=Probléma lépett fel report_message=Ha biztos benne, hogy ez egy Gitea hiba, keressen a problémára a GitHub-on és hozzon létre új hibajelentést, ha szükséges. @@ -90,6 +99,7 @@ report_message=Ha biztos benne, hogy ez egy Gitea hiba, keressen a problémára app_desc=Fájdalommentes, saját gépre telepíthető Git szolgáltatás install=Könnyen telepíthető platform=Keresztplatformos +platform_desc=A Gitea minden platformon fut, ahol a Go fordíthat: Windows, macOS, Linux, ARM, stb. Válassza azt, amelyet szereti! lightweight=Könnyűsúlyú license=Nyílt forráskódú @@ -201,7 +211,12 @@ my_mirrors=Tükreim view_home=Nézet %s search_repos=Tároló keresés… +show_archived=Archivált +show_private=Privát +show_both_private_public=Publikus és privát mutatása +show_only_private=Csak privát mutatása +show_only_public=Csak publikus mutatása issues.in_your_repos=A tárolóidban @@ -224,6 +239,7 @@ register_helper_msg=Van már felhasználói fiókja? Jelentkezzen be! social_register_helper_msg=Van már felhasználói fiókja? Csatlakoztassa most! disable_register_prompt=Regisztráció le van tiltva. Kérjük, lépjen kapcsolatba az oldal adminisztrátorával. disable_register_mail=Ki van kapcsolva a visszaigazoló e-mail küldése a regisztrációnál. +remember_me=Eszköz megjegyzése forgot_password_title=Elfelejtett jelszó forgot_password=Elfelejtette a jelszavát? sign_up_now=Szeretne bejelentkezni? Regisztráljon most. @@ -266,7 +282,6 @@ openid_connect_desc=A kiválasztott OpenID URI ismeretlen. Itt társíthatja egy openid_register_title=Új fiók létrehozása openid_register_desc=A kiválasztott OpenID URI ismeretlen. Itt társíthatja egy új fiókkal. openid_signin_desc=Adja meg az OpenID URI-jét. Például https://anne.me, bob.openid.org.cn vagy gnusocial.net/carry. -disable_forgot_password_mail=A felhasználó visszaállítás le van tiltva. Kérjük, lépjen kapcsolatba az oldal adminisztrátorával. email_domain_blacklisted=Ezzel az email címmel nem regisztrálhat. authorize_application=Alkalmazás engedélyezése authorize_redirect_notice=Ha engedélyezi ezt az alkalmazást, akkor átirányításra kerül a %s címre. 
@@ -276,11 +291,18 @@ authorization_failed=Az engedélyezés nem sikerült sspi_auth_failed=SSPI hitelesítés sikertelen [mail] + activate_account=Kérjük aktiválja a fiókját + activate_email=E-mail cím megerősítése + +register_notify=A Gitea üdvözli + reset_password=Fiókjának visszaállítása + register_success=Sikeres regisztráció -register_notify=A Gitea üdvözli + + @@ -366,6 +388,7 @@ repositories=Tárolók activity=Nyilvános tevékenységek followers=Követők starred=Csillagozott tárolók +projects=Projektek following=Követve follow=Követés unfollow=Követés törlése @@ -409,6 +432,7 @@ continue=Folytatás cancel=Mégsem language=Nyelv ui=Téma +privacy=Adatvédelem lookup_avatar_by_mail=Avatar mutatása email cím alapján federated_avatar_lookup=Összevont profilkép keresés @@ -467,11 +491,11 @@ add_new_key=SSH kulcs hozzáadása add_new_gpg_key=GPG kulcs hozzáadása ssh_key_been_used=Ezt az SSH kulcsot már hozzáadták a ehhez a szerverhez. gpg_key_id_used=Ilyen azonosítóval már létezik nyilvános GPG kulcs. -gpg_no_key_email_found=Ez a GPG kulcs nem használható egyik email címeddel sem. subkeys=Alkulcsok key_id=Kulcs ID key_name=Kulcs neve key_content=Tartalom +principal_content=Tartalom add_key_success=A SSH kulcsod sikeresen hozzáadva: '%s' add_gpg_key_success=A GPG kulcsod sikeresen hozzáadva: '%s' delete_key=Eltávolítás @@ -503,6 +527,7 @@ new_token_desc=A tokent használó alkalmazásoknak teljes hozzáférése van a token_name=Token neve generate_token=Token generálása generate_token_success=Új token létrehozva. Másold le most, mivel többször nem fog megjelenni. +generate_token_name_duplicate=A %s nevet már használja egy alkalmazás. Válassz kérlek más nevet. delete_token=Törlés access_token_deletion=Hozzáférési Token Törlése access_token_deletion_desc=Egy token törlésével visszavonja a hozzáférést a fiókjához az ezt használó alkalmazásoktól. Folytatja? @@ -540,6 +565,7 @@ twofa_is_enrolled=A fiókja jelenleg használ kétlépcsős hit twofa_not_enrolled=A fiókja jelenleg nem használ kétlépcsős hitelesítést. twofa_disable=Kétlépcsős hitelesítés letiltása twofa_scratch_token_regenerate=Kaparós kód újragenerálása +twofa_enroll=Kétlépcsős hitelesítés használata twofa_disable_note=A kétlépcsős azonosítás szükség esetén letiltható. twofa_disable_desc=A kétlépcsős hitelesítés letiltása a fiókot kevésbé biztonságossá teszi. Folytatható? twofa_disabled=Kétlépcsős hitelesítés letiltva. @@ -570,6 +596,7 @@ email_notifications.onmention=Email küldése csak megjelölés esetén email_notifications.disable=Email értesítés kikapcsolása email_notifications.submit=E-mail beállítások megadása + [repo] owner=Tulajdonos repo_name=Tároló neve @@ -615,9 +642,17 @@ reactions_more=és további %d language_other=Egyéb +desc.private=Privát +desc.public=Nyilvános +desc.private_template=Privát sablon +desc.public_template=Sablon +desc.internal=Belső +desc.archived=Archivált template.items=Sablon elemek template.git_content=Git tartalom (alapértelmezett branch) +template.git_hooks=Git Hook-ok +template.webhooks=Webhook-ok template.topics=Témák template.avatar=Avatar template.issue_labels=Hibajegy címkék @@ -631,7 +666,6 @@ archive.pull.nocomment=Ez a tároló archíválva van. Nem szólhat hozzá ehhez form.name_reserved=A tárolónév ('%s') a rendszernek van fenntartva. form.name_pattern_not_allowed='%s' minta nem engedélyezett tárolónévben. 
-need_auth=Hitelesítés másoláshoz migrate_items_wiki=Wiki migrate_items_milestones=Mérföldkövek migrate_items_labels=Címkék @@ -679,11 +713,14 @@ tags=Címkék issues=Hibajegyek pulls=Egyesítési kérések labels=Címkék +org_labels_desc_manage=kezelés milestones=Mérföldkövek commits=Commit-ok commit=Commit +release=Kiadás releases=Kiadások +tag=Címke file_raw=Nyers file_history=Előzmények file_view_raw=Nyers fájl megtekintése @@ -694,6 +731,7 @@ audio_not_supported_in_browser=A böngésző nem támogatja a HTML5 audio tag-et stored_lfs=Git LFS-el eltárolva symbolic_link=Szimbolikus hivatkozás commit_graph=Commit gráf +commit_graph.hide_pr_refs=Pull request-ek elrejtése normal_view=Normál nézet line=sor lines=sor @@ -756,12 +794,17 @@ ext_issues.desc=Külső hibakövető csatlakoztatás. issues.desc=Hibajelentések, feladatok és mérföldkövek elrendezése. +issues.filter_milestones=Mérföldkövek szűrése +issues.filter_labels=Címkék szűrése issues.new=Új hibajegy issues.new.title_empty=A cím nem lehet üres issues.new.labels=Címkék +issues.new.add_labels_title=Címke alkalmazása issues.new.no_label=Nincs címke issues.new.clear_labels=Címkék kiürítése +issues.new.no_items=Nincsenek elemek issues.new.milestone=Mérföldkő +issues.new.add_milestone_title=Mérföldkő beállítása issues.new.no_milestone=Nincs mérföldkő issues.new.clear_milestone=Mérföldkő eltávolítása issues.new.open_milestone=Nyitott mérföldkövek @@ -769,6 +812,8 @@ issues.new.closed_milestone=Lezárt mérföldkövek issues.new.assignees=Megbízottak issues.new.clear_assignees=Megbízottak eltávolítása issues.new.no_assignees=Nincsenek megbízottak +issues.new.no_reviewers=Nincs véleményező +issues.new.add_reviewer_title=Véleményezés kérése issues.no_ref=Nincsen ág/címke megadva issues.create=Hibajegy létrehozása issues.new_label=Új címke @@ -844,6 +889,7 @@ issues.commit_ref_at=`hivatkozott erre a hibajegyre egy commit-ból Jelentkezz be hogy csatlakozz a beszélgetéshez. issues.edit=Szerkesztés issues.cancel=Mégsem @@ -932,6 +978,7 @@ issues.review.show_outdated=Elavultak mutatása issues.review.hide_outdated=Elavultak elrejtése issues.assignee.error=Nem minden megbízott lett hozzáadva egy nem várt hiba miatt. + pulls.new=Egyesítési kérés pulls.compare_changes=Új egyesítési kérés pulls.compare_base=egyesítés ide @@ -1152,6 +1199,8 @@ settings.add_team_duplicate=A csapat már rendelkezik a tárolóval settings.add_team_success=A csapatnak most van hozzáférése a tárolóhoz. settings.remove_team_success=A csapat hozzáférése a tárolóhoz törölve lett. settings.add_webhook=Webhook hozzáadása +settings.webhook_deletion=Webhook eltávolítása +settings.webhook_deletion_success=A webhook el lett távolítva. settings.webhook.test_delivery=Küldés Kipróbálása settings.webhook.request=Kérés settings.webhook.response=Válasz @@ -1252,6 +1301,7 @@ diff.show_unified_view=Egyesített nézet diff.stats_desc=%d fájl változott, egészen pontosan %d új sor hozzáadva és %d régi sor törölve diff.bin=BINáris diff.view_file=Fájl megtekintése +diff.file_byte_size=Méret diff.file_suppressed=A különbségek nem kerülnek megjelenítésre, mivel a fájl túl nagy diff.too_many_files=Nem az összes módosított fájl került megjelenítésre, mert túl sok fájl változott diff.comment.placeholder=Hozzászólás létrehozása @@ -1275,6 +1325,7 @@ release.cancel=Mégse release.publish=Kiadás közzététele release.save_draft=Piszkozat mentése release.deletion_success=A kiadás törölve. +release.deletion_tag_success=A cimke törölve lett. release.tag_name_invalid=Ez a címkenév érvénytelen. 
release.downloads=Letöltések release.download_count=Letöltések: %s @@ -1494,7 +1545,6 @@ auths.host=Kiszolgáló auths.port=Port auths.bind_dn=Bind DN auths.bind_password=Bind Jelszó -auths.bind_password_helper=Figyelem: Ez a jelszó egyszerű szövegben van tárolva. Ha lehetséges használjon csak olvasható fiókot. auths.user_base=Felhasználókeresés alapja (BaseDN) auths.user_dn=Felhasználói DN auths.attribute_username=Felhasználónév attribútum @@ -1650,6 +1700,7 @@ config.session_life_time=Munkamenet Élettartama config.https_only=Csak HTTPS config.cookie_life_time=Süti Élettartam +config.picture_config=Kép és Avatár Konfiguráció config.picture_service=Kép Szolgáltatás config.disable_gravatar=Gravatar Kikapcsolása config.enable_federated_avatar=Összevont profilkép lekérés engedélyezése @@ -1670,12 +1721,14 @@ config.log_config=Naplózási Beállítások config.log_mode=Naplózási Módja config.disabled_logger=Letiltva config.access_log_template=Sablon +config.xorm_log_sql=SQL naplózása monitor.cron=Ütemezett Feladatok monitor.name=Név monitor.schedule=Ütemezés monitor.next=Legközelebb monitor.previous=Legutóbb +monitor.execute_times=Végrehajtások monitor.process=Futó Folyamatok monitor.desc=Leírás monitor.start=Kezdés Időpontja @@ -1699,6 +1752,7 @@ notices.delete_selected=Kiválasztottak Törlése notices.delete_all=Minden Értesítés Törlése notices.type=Típus notices.type_1=Tároló +notices.type_2=Feladat notices.desc=Leírás notices.op=Op. notices.delete_success=A rendszer-értesítések törölve lettek. @@ -1717,6 +1771,7 @@ merge_pull_request=`végrehajtott egy egyesítési kérést: %s delete_tag=címke %[2]s törölve innen: %[3]s delete_branch=ág %[2]s törölve innen: %[3]s +compare_branch=Összehasonlítás compare_commits=%d commit összehasonlítása compare_commits_general=Commitok összehasonlítása diff --git a/options/locale/locale_id-ID.ini b/options/locale/locale_id-ID.ini index 2c229468fdfc..1055109d491a 100644 --- a/options/locale/locale_id-ID.ini +++ b/options/locale/locale_id-ID.ini @@ -19,6 +19,7 @@ create_new=Buat… user_profile_and_more=Profil dan Pengaturan… signed_in_as=Masuk sebagai enable_javascript=Situs web ini bekerja lebih baik dengan JavaScript. +toc=Daftar Isi username=Nama Pengguna email=Alamat Email @@ -70,6 +71,7 @@ issues=Masalah milestones=Tonggak cancel=Batal +save=Simpan add=Tambah add_all=Tambah Semua remove=Buang @@ -81,6 +83,7 @@ loading=Memuat… + [error] [startpage] @@ -266,7 +269,6 @@ openid_connect_desc=OpenID URI yang dipilih tak dikenal. Asosiasikan dengan akun openid_register_title=Buat akun baru openid_register_desc=OpenID URI yang dipilih tak dikenal. Asosiasikan dengan akun baru disini. openid_signin_desc=Masukkan URI OpenID Anda. Misalnya: https://anne.me, bob.openid.org.cn, atau gnusocial.net/carry. -disable_forgot_password_mail=Pemulihan akun ditiadakan. Hubungi admin situs Anda. email_domain_blacklisted=Anda tidak dapat mendaftar dengan alamat email. authorize_application=Izinkan aplikasi authorize_redirect_notice=Anda akan dialihkan ke %s apabila Anda mengizinkan aplikasi ini. 
@@ -278,11 +280,18 @@ authorization_failed_desc=Otorisasi gagal oleh karena kami mendeteksi permintaan sspi_auth_failed=Autentikasi SSPI gagal [mail] + activate_account=Silakan aktifkan akun anda + activate_email=Verifikasi alamat surel anda + +register_notify=Selamat Datang di Gitea + reset_password=Pulihkan akun Anda + register_success=Pendaftaran berhasil -register_notify=Selamat Datang di Gitea + + @@ -471,7 +480,6 @@ add_new_key=Tambahkan Kunci SSH add_new_gpg_key=Tambahkan Kunci GPG ssh_key_been_used=Kunci SSH ini telah ditambahkan ke peladen. gpg_key_id_used=Kunci publik GPG dengan ID yang sama sudah ada. -gpg_no_key_email_found=Kunci GPG ini tidak dapat digunakan dengan alamat surel apapun yang terasosiasi dengan akun Anda. subkeys=Subkunci key_id=ID Kunci key_name=Nama Kunci @@ -588,6 +596,7 @@ email_notifications.enable=Aktifkan Pemberitahuan Surel email_notifications.disable=Nonaktifkan Email Notifikasi email_notifications.submit=Pasang Pengaturan Email + [repo] owner=Pemilik repo_name=Nama Repositori @@ -639,7 +648,6 @@ archive.pull.nocomment=Repositori ini diarsipkan. Anda tidak dapat mengomentari form.name_reserved=Nama repositori '%s' dicadangkan. form.name_pattern_not_allowed=Pola '%s' tidak diperbolehkan dalam nama repositori. -need_auth=Otorisasi Kloning migrate_items=Ihwal Migrasi migrate_items_wiki=Wiki migrate_repo=Migrasi Repositori @@ -840,6 +848,7 @@ issues.add_time_history=`tambah menghabiskan waktu %s` issues.add_time_hours=Jam issues.add_time_minutes=Menit + pulls.new=Permintaan Tarik Baru pulls.filter_branch=Penyaringan cabang pulls.no_results=Hasil tidak ditemukan. diff --git a/options/locale/locale_it-IT.ini b/options/locale/locale_it-IT.ini index ce52cd394dc7..2f0bff1cae8b 100644 --- a/options/locale/locale_it-IT.ini +++ b/options/locale/locale_it-IT.ini @@ -93,6 +93,7 @@ step2=Passo 2: error404=La pagina che stai cercando di raggiungere non esiste oppure non sei autorizzato a visualizzarla. + [error] occurred=Si è verificato un errore report_message=Se sei sicuro che sia un bug di Gitea, cerca il problema su GitHub e apri una nuova segnalazione se necessario. @@ -240,6 +241,7 @@ users=Utenti organizations=Organizzazioni search=Cerca code=Codice +search.fuzzy=Fuzzy search.match=Corrispondenze repo_no_results=Nessuna repository corrispondente. user_no_results=Nessun utente corrispondente. @@ -298,7 +300,6 @@ openid_connect_desc=L'URI OpenID scelto è sconosciuto. Qui puoi associarlo a un openid_register_title=Crea Nuovo Account openid_register_desc=L'URI OpenID scelto è sconosciuto. Qui puoi associarlo a un nuovo account. openid_signin_desc=Inserisci il tuo URI OpenID. Ad esempio: https://anne.me, bob.openid.org.cn o gnusocial.net/carry. -disable_forgot_password_mail=La reimpostazione della password è disabilitata. Si prega di contattare l'amministratore del sito. email_domain_blacklisted=Non è possibile registrarsi con il proprio indirizzo email. authorize_application=Autorizza applicazione authorize_redirect_notice=Verrai reindirizzato a %s se autorizzi questa applicazione. 
@@ -312,11 +313,18 @@ password_pwned=La password che hai scelto è in una lista mirror @@ -825,6 +835,7 @@ commits=Commit commit=Commit release=Rilascio releases=Rilasci +tag=Etichetta file_raw=Originale file_history=Cronologia file_view_source=Visualizza sorgente @@ -946,6 +957,7 @@ projects.board.edit_title=Nuovo Nome Della Scheda projects.board.new_title=Nuovo Nome Della Scheda projects.board.new_submit=Invia projects.board.new=Nuova Scheda +projects.board.set_default=Imposta come predefinito projects.board.delete=Elimina Scheda projects.board.deletion_desc=L'eliminazione di una scheda di progetto sposta tutti i problemi correlati a 'Uncategorized'. Continuare? projects.open=Apri @@ -1024,6 +1036,7 @@ issues.filter_type.all_issues=Tutti i problemi issues.filter_type.assigned_to_you=Assegnati a te issues.filter_type.created_by_you=Creati da te issues.filter_type.mentioning_you=Che ti riguardano +issues.filter_type.review_requested=Richiesta revisione issues.filter_sort=Ordina issues.filter_sort.latest=Più recenti issues.filter_sort.oldest=Più vecchi @@ -1207,6 +1220,7 @@ issues.review.un_resolve_conversation=Segnala la conversazione come non risolta issues.review.resolved_by=ha contrassegnato questa conversazione come risolta issues.assignee.error=Non tutte le assegnazioni sono state aggiunte a causa di un errore imprevisto. + pulls.desc=Attiva pull request e revisioni di codice. pulls.new=Nuova Pull Request pulls.compare_changes=Nuova Pull Request @@ -1228,10 +1242,10 @@ pulls.reopen_to_merge=Riapri questa pull request per effettuare l'unione. pulls.cant_reopen_deleted_branch=Questa pull request non può essere riaperta perché il branch è stato eliminato. pulls.merged=Unito pulls.merged_as=La pull request è stata unita come %[2]s. +pulls.manually_merged=Unito manualmente pulls.is_closed=La pull request è stata chiusa. pulls.has_merged=La pull request è stata unita. pulls.title_wip_desc=`Inizia il titolo con %s per evitare che la pull request venga unita accidentalmente.` -pulls.cannot_merge_work_in_progress=Questa pull request è contrassegnata come un lavoro in corso. Rimuovi il prefisso %s dal titolo quando è pronta pulls.data_broken=Questa pull request è rovinata a causa di informazioni mancanti del fork. pulls.files_conflicted=Questa pull request ha modifiche in conflitto con il branch di destinazione. pulls.is_checking=Verifica dei conflitti di merge in corso. Riprova tra qualche istante. @@ -1423,6 +1437,7 @@ activity.git_stats_deletion_n=%d cancellazioni search=Ricerca search.search_repo=Ricerca repository +search.fuzzy=Fuzzy search.results=Risultati della ricerca per "%s" in %s settings=Impostazioni @@ -1596,7 +1611,6 @@ settings.event_pull_request_label=Pull Request etichettata settings.event_pull_request_label_desc=Etichette Pull request aggiornate o cancellate. settings.event_pull_request_sync_desc=Pull request sincronizzata. settings.branch_filter=Filtro branch -settings.branch_filter_desc=Whitelist branch per gli eventi di push, creazione e cancellazione del ramo, specificati con un pattern globale. Se vuoto o *, gli eventi per tutti i rami vengono segnalati. Vedi github.com/gobwas/glob documentazione per la sintassi. Esempi: master, {master,release*}. settings.active=Attivo settings.active_helper=Le informazioni sugli eventi innescati saranno inviate a questo URL del webhook. settings.add_hook_success=Il webhook è stato aggiunto. @@ -1765,10 +1779,12 @@ diff.protected=Protetto releases.desc=Tenere traccia di versioni e download del progetto. 
release.releases=Rilasci +release.detail=Dettagli rilascio release.new_release=Nuovo Rilascio release.draft=Bozza release.prerelease=Pre-Rilascio release.stable=Stabile +release.compare=Confronta release.edit=modifica release.source_code=Codice Sorgente release.new_subheader=Le release organizzano le versioni del progetto. @@ -2096,7 +2112,6 @@ auths.host=Host auths.port=Porta auths.bind_dn=Binda DN auths.bind_password=Binda Password -auths.bind_password_helper=Attenzione: La password è memorizzata in testo normale. Se possibile, utilizzare un account di sola lettura. auths.user_base=Base ricerca utente auths.user_dn=DN dell'utente auths.attribute_username=Attributo nome utente diff --git a/options/locale/locale_ja-JP.ini b/options/locale/locale_ja-JP.ini index 3ecdea674174..86c92dc93557 100644 --- a/options/locale/locale_ja-JP.ini +++ b/options/locale/locale_ja-JP.ini @@ -83,6 +83,7 @@ add=追加 add_all=すべて追加 remove=除去 remove_all=すべて除去 +edit=編集 write=書き込み preview=プレビュー @@ -91,11 +92,16 @@ loading=読み込み中… step1=ステップ 1: step2=ステップ 2: +error=エラー error404=アクセスしようとしたページは存在しないか、閲覧が許可されていません。 +never=無し + [error] occurred=エラーが発生しました report_message=Giteaのバグが疑われる場合は、GitHubでIssueを検索して、見つからなければ新しいIssueを作成してください。 +missing_csrf=不正なリクエスト: CSRFトークンが不明です +invalid_csrf=不正なリクエスト: CSRFトークンが無効です [startpage] app_desc=自分で立てる、超簡単 Git サービス @@ -299,7 +305,8 @@ openid_connect_desc=選択したOpenID URIは未登録です。 ここで新し openid_register_title=アカウント新規作成 openid_register_desc=選択したOpenID URIは未登録です。 ここで新しいアカウントと関連付けます。 openid_signin_desc=あなたのOpenID URIを入力してください。 例: https://anne.me、bob.openid.org.cn、nusocial.net/carry -disable_forgot_password_mail=アカウント回復機能は無効になっています。 サイト管理者にお問い合わせください。 +disable_forgot_password_mail=メール送信設定が無いためアカウントの回復は無効になっています。 サイト管理者にお問い合わせください。 +disable_forgot_password_mail_admin=アカウントの回復はメール送信が設定済みの場合だけ使用できます。 アカウントの回復を有効にするにはメール送信を設定してください。 email_domain_blacklisted=あなたのメールアドレスでは登録することはできません。 authorize_application=アプリケーションを許可 authorize_redirect_notice=このアプリケーションを許可すると %s にリダイレクトします。 @@ -313,19 +320,64 @@ password_pwned=あなたが選択したパスワードは、過去の情報漏 password_pwned_err=HaveIBeenPwnedへのリクエストを完了できませんでした [mail] +view_it_on=%s で見る +link_not_working_do_paste=開かないですか? コピーしてブラウザーに貼り付けてみてください。 +hi_user_x=こんにちは、%s さん。 + activate_account=あなたのアカウントをアクティベートしてください。 +activate_account.title=%s さん、アカウントをアクティベートしてください +activate_account.text_1=こんにちは、%[1]s さん。 %[2]s へのご登録ありがとうございます! +activate_account.text_2=あなたのアカウントを有効化するため、%s以内に次のリンクをクリックしてください: + activate_email=メール アドレスを確認します +activate_email.title=%s さん、メールアドレス確認をお願いします +activate_email.text=あなたのメールアドレスを確認するため、%s以内に次のリンクをクリックしてください: + +register_notify=Giteaへようこそ +register_notify.title=%[1]s さん、%[2]s にようこそ +register_notify.text_1=これは %s への登録確認メールです! 
+register_notify.text_2=あなたはユーザー名 %s でログインできるようになりました。 +register_notify.text_3=このアカウントがあなたに作成されたものであれば、最初にパスワードを設定してください。 + reset_password=アカウントを回復 +reset_password.title=%s さん、あなたのアカウントの回復がリクエストされました +reset_password.text=あなたのアカウントを回復するには、%s以内に次のリンクをクリックしてください: + register_success=登録が完了しました -register_notify=Giteaへようこそ + +issue_assigned.pull=リポジトリ %[3]s で @%[1]s さんが、あなたをプルリクエスト %[2]s の担当者にしました。 +issue_assigned.issue=リポジトリ %[3]s で @%[1]s さんが、あなたを課題 %[2]s の担当者にしました。 + +issue.x_mentioned_you=@%s さんが、あなたにメンションしました: +issue.action.force_push=%[1]s さんが %[2]s に強制プッシュしました。(%[3]s から %[4]s へ) +issue.action.push_1=@%[1]s さんが %[2]s にコミット%[3]d件をプッシュしました。 +issue.action.push_n=@%[1]s さんが %[2]s にコミット%[3]d件をプッシュしました。 +issue.action.close=@%[1]s さんが #%[2]d をクローズしました。 +issue.action.reopen=@%[1]s さんが #%[2]d を再オープンしました。 +issue.action.merge=@%[1]s さんが #%[2]d を %[3]s にマージしました。 +issue.action.approve=@%[1]s さんがプルリクエストを承認しました。 +issue.action.reject=@%[1]s さんがプルリクエストに変更を要請しました。 +issue.action.review=@%[1]s さんがプルリクエストにコメントしました。 +issue.action.review_dismissed=@%[1]s さんが、プルリクエストに対する %[2]s さんの最新レビューを棄却しました。 +issue.action.ready_for_review=@%[1]s さんが、プルリクエストをレビュー可能な状態にしました。 +issue.action.new=@%[1]s さんが #%[2]d を作成しました。 +issue.in_tree_path=%s: release.new.subject=%[2]s の %[1]s がリリースされました +release.new.text=@%[1]s さんが %[3]s で %[2]s をリリースしました +release.title=タイトル: %s +release.note=リリースノート: +release.downloads=ダウンロード: +release.download.zip=ソースコード (ZIP) +release.download.targz=ソースコード (TAR.GZ) repo.transfer.subject_to=%s が "%s" を %s に移転しようとしています repo.transfer.subject_to_you=%s が "%s" をあなたに移転しようとしています repo.transfer.to_you=あなた +repo.transfer.body=承認または拒否するには %s を開きます。 もしくは単に無視してもかまいません。 repo.collaborator.added.subject=%s が %s にあなたを追加しました +repo.collaborator.added.text=あなたは次のリポジトリの共同作業者に追加されました: [modal] yes=はい @@ -366,6 +418,7 @@ email_error=`は有効なメールアドレスではありません。` url_error=`は有効なURLではありません。` include_error=`は文字列 '%s' を含んでいる必要があります。` glob_pattern_error=`のglobパターンが不正です: %s.` +regex_pattern_error=`の正規表現パターンが不正です: %s.` unknown_error=不明なエラー: captcha_incorrect=CAPTCHAコードが正しくありません。 password_not_match=パスワードが一致しません。 @@ -542,7 +595,6 @@ ssh_key_been_used=このSSHキーは既にサーバーに追加されていま ssh_key_name_used=同じ名前のSSHキーが既にアカウントに存在しています。 ssh_principal_been_used=このプリンシパルは既にサーバーに追加されています。 gpg_key_id_used=同じIDを持つGPG公開鍵が既に存在しています。 -gpg_no_key_email_found=このGPGキーは、あなたのアカウントに関連付けられたどのメールアドレスでも使用できません。 subkeys=サブキー key_id=キーID key_name=キー名 @@ -673,6 +725,14 @@ email_notifications.onmention=メンションのみメール通知 email_notifications.disable=メール通知無効 email_notifications.submit=メール設定を保存 +visibility=ユーザーの公開範囲 +visibility.public=パブリック +visibility.public_tooltip=すべてのユーザーに表示します +visibility.limited=限定 +visibility.limited_tooltip=ログインしているユーザーにのみ表示します +visibility.private=プライベート +visibility.private_tooltip=組織のメンバーにのみ表示します + [repo] new_repo_helper=リポジトリには、プロジェクトのすべてのファイルとリビジョン履歴が入ります。 すでにほかの場所にありますか? 
リポジトリを移行 もどうぞ。 owner=オーナー @@ -723,7 +783,7 @@ mirror_prune_desc=不要になった古いリモートトラッキング参照 mirror_interval=ミラー間隔 (有効な時間の単位は'h'、'm'、's')。 自動的な同期を無効にする場合は0。 mirror_interval_invalid=ミラー間隔が不正です。 mirror_address=クローンするURL -mirror_address_desc=必要な資格情報は「クローン時の認証」セクションに設定してください。 +mirror_address_desc=必要な資格情報は「認証」セクションに設定してください。 mirror_address_url_invalid=入力したURLは無効です。 URLの構成要素はすべて正しくエスケープする必要があります。 mirror_address_protocol_invalid=入力したURLは無効です。 ミラーできるのは、http(s):// または git:// の場所からだけです。 mirror_lfs=Large File Storage (LFS) @@ -731,6 +791,9 @@ mirror_lfs_desc=LFS データのミラーリングを有効にする。 mirror_lfs_endpoint=LFS エンドポイント mirror_lfs_endpoint_desc=同期するときは、クローンURLをもとにLFSサーバーを決定しようとします。 リポジトリのLFSデータがほかの場所に保存されている場合は、独自のエンドポイントを指定することができます。 mirror_last_synced=前回の同期 +mirror_password_placeholder=(変更なし) +mirror_password_blank_placeholder=(未設定) +mirror_password_help=ユーザー名を変更すると保存されているパスワードは消去されます。 watchers=ウォッチャー stargazers=スターゲイザー forks=フォーク @@ -747,6 +810,7 @@ delete_preexisting_label=削除 delete_preexisting=既存のファイルを削除 delete_preexisting_content=%s のファイルを削除します delete_preexisting_success=%s の未登録ファイルを削除しました +blame_prior=この変更より前のBlameを表示 transfer.accept=転送を承認 transfer.accept_desc="%s" に転送 @@ -783,7 +847,7 @@ form.reach_limit_of_creation_n=すでにあなたが作成できるリポジト form.name_reserved=リポジトリ名 '%s' は予約されています。 form.name_pattern_not_allowed='%s' の形式はリポジトリ名に使用できません。 -need_auth=クローン時の認証 +need_auth=認証 migrate_options=移行オプション migrate_service=移行するサービス migrate_options_mirror_helper=このリポジトリをミラーにする @@ -817,11 +881,19 @@ migrated_from_fake=%[1]sから移行 migrate.migrate=%s からの移行 migrate.migrating=%s から移行しています ... migrate.migrating_failed=%s からの移行が失敗しました。 +migrate.migrating_failed.error=エラー: %s migrate.github.description=Github.com または Github Enterprise からデータを移行します。 migrate.git.description=Gitサービスからgitデータを移行またはミラーを作成します migrate.gitlab.description=GitLab.com またはセルフホストのgitlabサーバーからデータを移行します。 migrate.gitea.description=Gitea.comまたはセルフホストのGiteaサーバーからデータを移行します。 migrate.gogs.description=notabug.org や、他のセルフホストのGogsサーバーからデータを移行します。 +migrate.migrating_git=Gitデータ移行中 +migrate.migrating_topics=トピック移行中 +migrate.migrating_milestones=マイルストーン移行中 +migrate.migrating_labels=ラベル移行中 +migrate.migrating_releases=リリース移行中 +migrate.migrating_issues=課題移行中 +migrate.migrating_pulls=プルリクエスト移行中 mirror_from=ミラー元 forked_from=フォーク元 @@ -854,6 +926,7 @@ branch=ブランチ tree=ツリー clear_ref=`現在の参照をクリア` filter_branch_and_tag=ブランチまたはタグを絞り込み +find_tag=タグを検索 branches=ブランチ tags=タグ issues=課題 @@ -1284,6 +1357,9 @@ issues.review.resolved_by=がこの会話を解決済みにしました issues.assignee.error=予期しないエラーにより、一部の担当者を追加できませんでした。 issues.reference_issue.body=内容 +compare.compare_base=基準 +compare.compare_head=比較 + pulls.desc=プルリクエストとコードレビューの有効化。 pulls.new=新しいプルリクエスト pulls.compare_changes=新規プルリクエスト @@ -1311,7 +1387,10 @@ pulls.manually_merged_as=プルリクエストは タイトルの頭に %s を付けます。` -pulls.cannot_merge_work_in_progress=このプルリクエストはWork in Progressとマークされています。 マージできる状態になったら、タイトルから %s を消してください。 +pulls.cannot_merge_work_in_progress=このプルリクエストは作業中(WIP)としてマーキングされています。 +pulls.still_in_progress=まだ作業中? +pulls.add_prefix=先頭に %s を追加 +pulls.remove_prefix=先頭の %s を除去 pulls.data_broken=このプルリクエストは、フォークの情報が見つからないため壊れています。 pulls.files_conflicted=このプルリクエストは、ターゲットブランチと競合する変更を含んでいます。 pulls.is_checking=マージのコンフリクトを確認中です。 少し待ってからもう一度実行してください。 @@ -1537,6 +1616,15 @@ settings.hooks=Webhook settings.githooks=Gitフック settings.basic_settings=基本設定 settings.mirror_settings=ミラー設定 +settings.mirror_settings.docs=他のリポジトリへの自動的なプッシュ/プルを行うよう、プロジェクトを設定します。 ブランチ、タグ、コミットが自動的に同期されます。 リポジトリをミラーするには? 
+settings.mirror_settings.mirrored_repository=同期するリポジトリ +settings.mirror_settings.direction=方向 +settings.mirror_settings.direction.pull=プル +settings.mirror_settings.direction.push=プッシュ +settings.mirror_settings.last_update=最終更新 +settings.mirror_settings.push_mirror.none=プッシュミラーは設定されていません +settings.mirror_settings.push_mirror.remote_url=リモートGitリポジトリのURL +settings.mirror_settings.push_mirror.add=プッシュミラーを追加 settings.sync_mirror=今すぐ同期 settings.mirror_sync_in_progress=ミラー同期を実行しています。 しばらくあとでまた確認してください。 settings.email_notifications.enable=メール通知有効 @@ -1545,6 +1633,7 @@ settings.email_notifications.disable=メール通知無効 settings.email_notifications.submit=メール設定を保存 settings.site=Webサイト settings.update_settings=設定を更新 +settings.branches.update_default_branch=デフォルトブランチを更新 settings.advanced_settings=拡張設定 settings.wiki_desc=Wikiを有効にする settings.use_internal_wiki=ビルトインのWikiを使用する @@ -1601,6 +1690,7 @@ settings.transfer_form_title=確認のためリポジトリ名を入力: settings.transfer_in_progress=現在進行中の転送があります。このリポジトリを別のユーザーに転送したい場合はキャンセルしてください。 settings.transfer_notices_1=- 個人ユーザーに移転すると、あなたはリポジトリへのアクセス権を失います。 settings.transfer_notices_2=- あなたが所有(または共同で所有)している組織に移転すると、リポジトリへのアクセス権は維持されます。 +settings.transfer_notices_3=- プライベートリポジトリを個人ユーザーに移転した場合は、最低限そのユーザーが読み取り権限を持つよう設定されます (必要に応じて権限が変更されます)。 settings.transfer_owner=新しいオーナー settings.transfer_perform=転送を実行 settings.transfer_started=このリポジトリは転送のためにマークされており、「%s」からの確認を待っています @@ -1724,7 +1814,7 @@ settings.event_pull_request_review_desc=プルリクエストの承認・拒否 settings.event_pull_request_sync=プルリクエストの同期 settings.event_pull_request_sync_desc=プルリクエストが同期されたとき。 settings.branch_filter=ブランチ フィルター -settings.branch_filter_desc=プッシュ、ブランチ作成、ブランチ削除のイベントを通知するブランチを、globパターンで指定するホワイトリストです。 空か*のときは、すべてのブランチのイベントを通知します。 文法は github.com/gobwas/glob を参照してください。 例: master{master,release*} +settings.branch_filter_desc=プッシュ、ブランチ作成、ブランチ削除のイベントを通知するブランチを、globパターンで指定するホワイトリストです。 空か*のときは、すべてのブランチのイベントを通知します。 文法については github.com/gobwas/glob を参照してください。 例: master{master,release*} settings.active=有効 settings.active_helper=トリガーとなったイベントに関する情報が、このWebhookのURLに送信されます。 settings.add_hook_success=Webhookを追加しました。 @@ -1794,7 +1884,7 @@ settings.dismiss_stale_approvals_desc=プルリクエストの内容を変える settings.require_signed_commits=コミット署名必須 settings.require_signed_commits_desc=署名されていない場合、または署名が検証できなかった場合は、このブランチへのプッシュを拒否します。 settings.protect_protected_file_patterns=保護されるファイルのパターン (セミコロン'\;'で区切る): -settings.protect_protected_file_patterns_desc=保護されたファイルは、このブランチにファイルを追加・編集・削除する権限を持つユーザーであっても、そのままでは変更することはできません。 複数のパターンはセミコロン('\;')で区切ります。 パターンの文法については github.com/gobwas/glob を参照してください。 例: .drone.yml, /docs/**/*.txt +settings.protect_protected_file_patterns_desc=保護されたファイルは、このブランチにファイルを追加・編集・削除する権限を持つユーザーであっても、直接変更することができなくなります。 セミコロン('\;')で区切って複数のパターンを指定できます。 パターンの文法については github.com/gobwas/glob を参照してください。 例: .drone.yml, /docs/**/*.txt settings.add_protected_branch=保護を有効にする settings.delete_protected_branch=保護を無効にする settings.update_protect_branch_success=ブランチ '%s' の保護を更新しました。 @@ -1813,6 +1903,16 @@ settings.choose_branch=ブランチを選択… settings.no_protected_branch=保護しているブランチはありません。 settings.edit_protected_branch=編集 settings.protected_branch_required_approvals_min=必要な承認数は負の数にできません。 +settings.tags=タグ +settings.tags.protection=タグの保護 +settings.tags.protection.pattern=タグ名パターン +settings.tags.protection.allowed=許可 +settings.tags.protection.allowed.users=許可するユーザー +settings.tags.protection.allowed.teams=許可するチーム +settings.tags.protection.allowed.noone=なし +settings.tags.protection.create=タグを保護 +settings.tags.protection.none=タグは保護されていません。 
+settings.tags.protection.pattern.description=ひとつのタグ名か、複数のタグにマッチするglobパターンまたは正規表現を使用できます。 詳しくは タグの保護ガイド をご覧ください。 settings.bot_token=Botトークン settings.chat_id=チャットID settings.matrix.homeserver_url=ホームサーバー URL @@ -1826,6 +1926,7 @@ settings.archive.success=リポジトリをアーカイブしました。 settings.archive.error=リポジトリのアーカイブ設定でエラーが発生しました。 詳細はログを確認してください。 settings.archive.error_ismirror=ミラーのリポジトリはアーカイブできません。 settings.archive.branchsettings_unavailable=ブランチ設定は、アーカイブリポジトリでは使用できません。 +settings.archive.tagsettings_unavailable=タグ設定は、アーカイブリポジトリでは使用できません。 settings.unarchive.button=アーカイブ解除 settings.unarchive.header=このリポジトリをアーカイブ解除 settings.unarchive.text=リポジトリのアーカイブを解除すると、コミット、プッシュ、新規の課題やプルリクエストを受け付けるよう元に戻されます。 @@ -1885,6 +1986,7 @@ diff.file_image_width=幅 diff.file_image_height=高さ diff.file_byte_size=サイズ diff.file_suppressed=ファイル差分が大きすぎるため省略します +diff.file_suppressed_line_too_long=長すぎる行があるためファイル差分は表示されません diff.too_many_files=変更されたファイルが多すぎるため、一部のファイルは表示されません diff.comment.placeholder=コメントを残す diff.comment.markdown_info=Markdownによる書式設定をサポートしています。 @@ -1912,6 +2014,7 @@ release.new_release=新しいリリース release.draft=下書き release.prerelease=プレリリース release.stable=安定版 +release.compare=比較 release.edit=編集 release.ahead.commits=%d件のコミット release.ahead.target=が、このリリース後 %s に追加されています @@ -1938,6 +2041,7 @@ release.deletion_tag_desc=リポジトリからこのタグを削除します。 release.deletion_tag_success=タグを削除しました。 release.tag_name_already_exist=このタグ名のリリースが既に存在します。 release.tag_name_invalid=タグ名が不正です。 +release.tag_name_protected=保護されているタグ名です。 release.tag_already_exist=このタグ名は既に存在します。 release.downloads=ダウンロード release.download_count=ダウンロード数: %s @@ -1969,6 +2073,10 @@ branch.restore=ブランチ '%s' の復元 branch.download=ブランチ '%s' をダウンロード branch.included_desc=このブランチはデフォルトブランチに含まれています branch.included=埋没 +branch.create_new_branch=このブランチをもとに作成します: +branch.confirm_create_branch=ブランチを作成 +branch.new_branch=新しいブランチの作成 +branch.new_branch_from='%s' から新しいブランチを作成 tag.create_tag=タグ %s を作成 tag.create_success=タグ '%s' が作成されました。 @@ -2178,6 +2286,8 @@ dashboard.total_gc_time=GC停止時間の合計 dashboard.total_gc_pause=GC停止時間の合計 dashboard.last_gc_pause=前回のGC停止時間 dashboard.gc_times=GC実行回数 +dashboard.delete_old_actions=データベースから古い操作履歴をすべて削除 +dashboard.delete_old_actions.started=データベースからの古い操作履歴の削除を開始しました。 users.user_manage_panel=ユーザーアカウント管理 users.new_account=ユーザーアカウントを作成 @@ -2273,7 +2383,6 @@ auths.host=ホスト auths.port=ポート auths.bind_dn=バインドDN auths.bind_password=バインドパスワード -auths.bind_password_helper=警告: このパスワードはプレーンテキストで保存されます。 可能であれば読み取り専用アカウントを使用してください。 auths.user_base=ユーザー検索ベース auths.user_dn=ユーザーDN auths.attribute_username=ユーザー名 @@ -2304,6 +2413,7 @@ auths.allowed_domains_helper=すべてのドメインを許可する場合は空 auths.enable_tls=TLS暗号化を有効にする auths.skip_tls_verify=TLS検証を省略 auths.pam_service_name=PAMサービス名 +auths.pam_email_domain=PAM メールドメイン名 (オプション) auths.oauth2_provider=OAuth2プロバイダー auths.oauth2_icon_url=アイコンのURL auths.oauth2_clientID=クライアントID (キー) @@ -2403,6 +2513,7 @@ config.db_path=パス config.service_config=サービス設定 config.register_email_confirm=登録にはメールによる確認が必要 config.disable_register=セルフ登録無効 +config.allow_only_internal_registration=Gitea上での登録のみを許可 config.allow_only_external_registration=外部サービスを使用した登録のみを許可 config.enable_openid_signup=OpenIDを使ったセルフ登録有効 config.enable_openid_signin=OpenIDを使ったサインイン有効 diff --git a/options/locale/locale_ko-KR.ini b/options/locale/locale_ko-KR.ini index c84000bd3d2e..39277bf1c3ae 100644 --- a/options/locale/locale_ko-KR.ini +++ b/options/locale/locale_ko-KR.ini @@ -50,6 +50,8 @@ new_migrate=새 마이그레이션 new_mirror=새로운 미러 new_fork=새 저장소 포크 new_org=새로운 조직 +new_project=새 프로젝트 +new_project_board=새 프로젝트 보드 
manage_org=조직 관리 admin_panel=사이트 관리 account_settings=계정 설정 @@ -70,6 +72,7 @@ issues=이슈들 milestones=마일스톤 cancel=취소 +save=저장 add=추가 add_all=모두 추가 remove=삭제 @@ -81,7 +84,9 @@ loading=불러오는 중... + [error] +occurred=오류가 발생했습니다 [startpage] app_desc=편리한 설치형 Git 서비스 @@ -267,11 +272,18 @@ authorization_failed=인증 실패 sspi_auth_failed=SSPI 인증 실패 [mail] + activate_account=계정을 활성화하세요 + activate_email=이메일 주소 확인 + +register_notify=Gitea에 오신것을 환영합니다! + reset_password=계정 복구 + register_success=등록 완료 -register_notify=Gitea에 오신것을 환영합니다! + + @@ -451,7 +463,6 @@ gpg_helper=도움이 필요하세요? GitHub의 설명서를 add_new_key=SSH 키 추가 add_new_gpg_key=GPG 키 추가 gpg_key_id_used=같은 ID의 GPG 공개키가 이미 존재합니다. -gpg_no_key_email_found=이 GPG 키는 귀하의 계정에 연결된 어떠한 이메일 주소로도 사용할 수 없습니다. subkeys=하위 키 key_id=키 ID key_name=키 이름 @@ -545,6 +556,7 @@ delete_account_desc=이 계정을 정말로 삭제하시겠습니까? email_notifications.enable=이메일 알림 켜기 email_notifications.disable=이메일 알림 끄기 + [repo] owner=소유자 repo_name=저장소 이름 @@ -593,7 +605,6 @@ template.topics=토론 주제 form.name_reserved=저장소 이름 '%s'은 예약 되어 있습니다. form.name_pattern_not_allowed='%s' 패턴은 저장소명으로 허용되지 않습니다. -need_auth=클론시 인증 migrate_items_wiki=위키 migrate_items_issues=이슈 migrate_repo=저장소 마이그레이션 @@ -856,6 +867,7 @@ issues.review.reviewers=리뷰어 issues.review.show_outdated=오래된 내역 보기 issues.review.hide_outdated=오래된 내역 숨기기 + pulls.new=새 풀 리퀘스트 pulls.compare_changes=새 풀 리퀘스트 pulls.compare_base=병합하기 diff --git a/options/locale/locale_lv-LV.ini b/options/locale/locale_lv-LV.ini index 718e29f18d6f..f1e802afbcab 100644 --- a/options/locale/locale_lv-LV.ini +++ b/options/locale/locale_lv-LV.ini @@ -83,6 +83,7 @@ add=Pievienot add_all=Pievienot visus remove=Noņemt remove_all=Noņemt visus +edit=Labot write=Rakstīt preview=Priekšskatītījums @@ -91,11 +92,16 @@ loading=Notiek ielāde… step1=Solis 1: step2=Solis 2: +error=Kļūda error404=Lapa, ko vēlaties atvērt, neeksistē vai arī Jums nav tiesības to aplūkot. +never=Nekad + [error] occurred=Radusies kļūda report_message=Ja esat drošs, ka šī ir Gitea kļūda, pirms ziņošanas Gitea problēmās pārliecinieties, ka lietojat jaunāko Gitea versiju un par šādu kļūdu jau nav ziņots. +missing_csrf=Kļūdains pieprasījums: netika saņemts drošības talons +invalid_csrf=Kļūdains pieprasījums: iesūtīts kļūdains drošības talons [startpage] app_desc=Viegli uzstādāms Git serviss @@ -299,7 +305,8 @@ openid_connect_desc=Izvēlētais OpenID konts sistēmā netika atpazīts, bet J openid_register_title=Izveidot jaunu kontu openid_register_desc=Izvēlētais OpenID konts sistēmā netika atpazīts, bet Jūs to varat piesaistīt esošam kontam. openid_signin_desc=Ievadiet savu OpenID URI, piemēram: https://anna.me, peteris.openid.org.lv, gnusocial.net/janis. -disable_forgot_password_mail=Paroles atjaunošanas iespēja ir atslēgta. Sazinieties ar lapas administratoru. +disable_forgot_password_mail=Konta atjaunošana ir atspējota, jo nav uzstādīti e-pasta servera iestatījumi. Sazinieties ar lapas administratoru. +disable_forgot_password_mail_admin=Kontu atjaunošana ir pieejama tikai, ja ir veikta e-pasta servera iestatījumu konfigurēšana. Norādiet e-pasta servera iestatījumus, lai iespējotu kontu atjaunošanu. email_domain_blacklisted=Nav atļauts reģistrēties ar šādu e-pasta adresi. authorize_application=Autorizēt lietotni authorize_redirect_notice=Jūs tiksiet nosūtīts uz %s, ja autorizēsiet šo lietotni. @@ -313,14 +320,64 @@ password_pwned=Ievadītā parole ir %s, + activate_account=Lūdzu, aktivizējiet savu kontu +activate_account.title=%s, aktivizējiet savu kontu +activate_account.text_1=Sveiki %[1]s, esat reģistrējies %[2]s! 
+activate_account.text_2=Nospiediet uz saites, lai aktivizētu savu kontu lapā %s: + activate_email=Apstipriniet savu e-pasta adresi -reset_password=Atgūt kontu -register_success=Veiksmīga reģistrācija +activate_email.title=%s, apstipriniet savu e-pasta adresi +activate_email.text=Nospiediet uz saites, lai apstiprinātu savu e-pasta adresi lapā %s: + register_notify=Laipni lūdzam Gitea +register_notify.title=%[1]s, esat reģistrējies %[2]s +register_notify.text_1=šis ir reģistrācijas apstiprinājuma e-pasts lapai %s! +register_notify.text_2=Tagad varat autorizēties ar lietotāja vārdu: %s. +register_notify.text_3=Ja šis konts Jums tika izveidots, tad obligāti nomainiet citu paroli. +reset_password=Atgūt kontu +reset_password.title=%s, esat pieprasījis atjaunot savu kontu +reset_password.text=Nospiediet uz saites, lai atjaunotu savu kontu lapā %s: +register_success=Veiksmīga reģistrācija +issue_assigned.pull=@%[1]s piešķīra jums izmaiņu pieprasījumu %[2]s repozitorijā %[3]s. +issue_assigned.issue=@%[1]s piešķīra jums problēmu %[2]s repozitorijā %[3]s. + +issue.x_mentioned_you=@%s pieminēja Jūs: +issue.action.force_push=%[1]s veica piespiedu izmaiņu iesūtīšanu atzarā %[2]s no revīzijas %[3]s uz %[4]s. +issue.action.push_1=@%[1]s iesūtīja %[3]d revīziju atzarā %[2]s +issue.action.push_n=@%[1]s iesūtīja %[3]d revīzijas atzarā %[2]s +issue.action.close=@%[1]s aizvēra #%[2]d. +issue.action.reopen=@%[1]s atkārtoti atvēra #%[2]d. +issue.action.merge=@%[1]s sapludināja #%[2]d atzarā %[3]s. +issue.action.approve=@%[1]s apstiprināja izmaiņu pieprasījumu. +issue.action.reject=@%[1]s pieprasīja izmaiņas šajā izmaiņu pieprasījumā. +issue.action.review=@%[1]s komentēja šo izmaiņu pieprasījumu. +issue.action.review_dismissed=@%[1]s atmeta pēdējo %[2]s recenziju šim izmaiņu pieprasījumam. +issue.action.ready_for_review=@%[1]s atzīmēja šo izmaiņu pieprasījumu, ka tas ir gatavs recenzēšanai. +issue.action.new=@%[1]s izveidoja #%[2]d. +issue.in_tree_path=Ceļā %s: + +release.new.subject=Jauns laidiens %s repozitorijā %s +release.new.text=@%[1]s izveidoja jaunu laidienu %[2]s repozitorijā %[3]s +release.title=Nosaukums: %s +release.note=Piezīmes: +release.downloads=Lejupielādes: +release.download.zip=Izejas kods (ZIP) +release.download.targz=Izejas kods (TAR.GZ) + +repo.transfer.subject_to=%s vēlas pārsūtīt repozitoriju "%s" organizācijai %s +repo.transfer.subject_to_you=%s vēlas Jums pārsūtīt repozitoriju "%s" +repo.transfer.to_you=Jums +repo.transfer.body=Ja vēlaties to noraidīt vai apstiprināt, tad apmeklējiet saiti %s. + +repo.collaborator.added.subject=%s pievienoja Jūs repozitorijam %s +repo.collaborator.added.text=Jūs tikāt pievienots kā līdzstrādnieks repozitorijam: [modal] yes=Jā @@ -360,7 +417,8 @@ max_size_error=` jabūt ne mazāk kā %s simbolu garumā.` email_error=` nav derīga e-pasta adrese.` url_error=` nav korekts URL.` include_error=` ir jāsatur tekstu '%s'.` -glob_pattern_error=` glob izteiksme nav korekta: %s.` +glob_pattern_error=` glob šablons nav korekts: %s.` +regex_pattern_error=` regulārā izteiksme nav korekta: %s.` unknown_error=Nezināma kļūda: captcha_incorrect=Ievadīts nepareizs drošības kods. password_not_match=Izvēlētā parole nesakrīt ar atkārtoti ievadīto. @@ -415,6 +473,7 @@ repositories=Repozitoriji activity=Publiskā aktivitāte followers=Sekotāji starred=Atzīmēti repozitoriji +watched=Vērotie repozitoriji projects=Projekti following=Seko follow=Sekot @@ -536,7 +595,20 @@ ssh_key_been_used=Šī SSH atslēga jau ir pievienota šajā serverī. 
ssh_key_name_used=SSH atslēga ar šādu nosaukumu šim kontam jau eksistē. ssh_principal_been_used=Šāda identitāte jau ir pievienota šājā serverī. gpg_key_id_used=Publiskā GPG atslēga ar šādu ID jau eksistē. -gpg_no_key_email_found=Jūsu kontam nav piesaistīta neviena no šīs GPG atslēgas e-pasta adresēm. +gpg_no_key_email_found=GPG atslēga neatbilst nevienai Jūsu konta aktivizētajai e-pasta adresei. Šo atslēgu ir iespējams pievienot, veicot, talona parakstīšanu. +gpg_key_matched_identities=Atbilstošās identitātes: +gpg_key_matched_identities_long=Iegultās identitātes šājā atslēgā atbilst sekojošām aktivizētām e-pasta adresēm šim lietotajam. Revīzijas ar atbilstošām e-pasta adresēm var tik pārbaudītas ar šo atslēgu. +gpg_key_verified=Pārbaudītā atslēga +gpg_key_verified_long=Atslēga tika pārbaudīta ar talonu un var tikt izmantota, lai pārbaudītu revīzijas, kas atbilst jebkurai aktivizētai e-pasta adresei šim lietotājam papildus šīs atslēgas atbilstošajām identitātēm. +gpg_key_verify=Pārbaudīt +gpg_invalid_token_signature=Norādītā GPG atslēga, paraksts un talons neatbilst vai talonam ir beidzies derīguma termiņš. +gpg_token_required=Jānorāda paraksts zemāk esošajam talonam +gpg_token=Talons +gpg_token_help=Parakstu ir iespējams uzģenerēt izmantojot komandu: +gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=Tekstuāls GPG paraksts +key_signature_gpg_placeholder=Sākas ar '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=GPG atslēga '%s' ir pārbaudīta. subkeys=Apakšatslēgas key_id=Atslēgas ID key_name=Atslēgas nosaukums @@ -667,6 +739,14 @@ email_notifications.onmention=Tikai, ja esmu pieminēts email_notifications.disable=Nesūtīt paziņojumus email_notifications.submit=Saglabāt sūtīšanas iestatījumus +visibility=Lietotāja redzamība +visibility.public=Publisks +visibility.public_tooltip=Redzams visiem lietotājiem +visibility.limited=Ierobežota +visibility.limited_tooltip=Redzams tikai autorizētajiem lietotājiem +visibility.private=Privāts +visibility.private_tooltip=Redzams tikai organizāciju dalībniekiem + [repo] new_repo_helper=Repozitorijs satur projekta visus failus, tai skaitā to izmaiņu vēsturi. Jau ir pieejams citur? Migrējiet repozitoriju. owner=Īpašnieks @@ -720,7 +800,14 @@ mirror_address=Spoguļa adrese mirror_address_desc=Pieslēgšanās rekvizītus norādiet autorizācijas sadaļā. mirror_address_url_invalid=Norādītais URL nav korekts. Norādiet visas URL daļas korekti. mirror_address_protocol_invalid=Norādītais URL nav korekts. Var spoguļot tikai no http(s):// vai git:// adresēm. +mirror_lfs=Lielu failu glabātuve (LFS) +mirror_lfs_desc=Aktivizēt LFS datu spoguļošanu. +mirror_lfs_endpoint=LFS galapunkts +mirror_lfs_endpoint_desc=Sinhronizācija mēģinās izmantot klonēsanas URL, lai noteiktu LFS serveri. Var norādīt arī citu galapunktu, ja repozitorija LFS dati ir izvietoti citā vietā. mirror_last_synced=Pēdējo reizi sinhronizēts +mirror_password_placeholder=(bez izmaiņām) +mirror_password_blank_placeholder=(nav uzstādīts) +mirror_password_help=Nomainiet lietotāju, lai izdzēstu saglabāto paroli. 
watchers=Novērotāji stargazers=Zvaigžņdevēji forks=Atdalītie repozitoriji @@ -737,6 +824,7 @@ delete_preexisting_label=Dzēst delete_preexisting=Dzēst jau eksistējošos failus delete_preexisting_content=Dzēst failus direktorijā %s delete_preexisting_success=Dzēst nepārņemtos failus direktorijā %s +blame_prior=Aplūkot vainīgo par izmaiņām pirms šīs revīzijas transfer.accept=Apstiprināt īpašnieka maiņu transfer.accept_desc=Mainīt īpašnieku uz "%s" @@ -773,11 +861,16 @@ form.reach_limit_of_creation_n=Sasniegts Jums noteiktais %d repozitoriju ierobe form.name_reserved=Repozitorija nosaukums '%s' ir jau rezervēts. form.name_pattern_not_allowed=Repozitorija nosaukums '%s' nav atļauts. -need_auth=Nepieciešama autorizācija +need_auth=Autorizācija migrate_options=Migrācijas opcijas migrate_service=Migrācijas serviss migrate_options_mirror_helper=Šis repozitorijs būs spogulis migrate_options_mirror_disabled=Lapas administrators ir atslēdzies iespēju viedot jaunus spoguļus. +migrate_options_lfs=Migrēt LFS failus +migrate_options_lfs_endpoint.label=LFS galapunkts +migrate_options_lfs_endpoint.description=Migrācija mēģinās izmantot attālināto URL, lai noteiktu LFS serveri. Var norādīt arī citu galapunktu, ja repozitorija LFS dati ir izvietoti citā vietā. +migrate_options_lfs_endpoint.description.local=Iespējams norādīt arī servera ceļu. +migrate_options_lfs_endpoint.placeholder=Atstājiet tukšu, lai noteiktu pēc klonēšanas URL migrate_items=Vienības, ko pārņemt migrate_items_wiki=Vikivietni migrate_items_milestones=Atskaites punktus @@ -791,7 +884,10 @@ migrate.clone_address=Klonēšanas adrese migrate.clone_address_desc=Tā var būt HTTP(S) adrese vai Git 'clone' URL eksistējošam repozitorijam migrate.clone_local_path=vai servera lokālais ceļš migrate.permission_denied=Jums nav tiesību importēt lokālu repozitoriju. +migrate.permission_denied_blocked=Nav atļauts veikt importu no bloķētiem serveriem. +migrate.permission_denied_private_ip=Nav atļauts veikt importu no bloķētiem no privātām IP adresēm. migrate.invalid_local_path=Nederīgs lokālais ceļš. Tas neeksistē vai nav direktorija. +migrate.invalid_lfs_endpoint=LFS galapunkts nav korekts. migrate.failed=Migrācija neizdevās: %v migrate.migrate_items_options=Piekļuves talons ir nepieciešams, lai migrētu papildus datus migrated_from=Migrēts no %[2]s @@ -799,11 +895,19 @@ migrated_from_fake=Migrēts no %[1]s migrate.migrate=Migrēt no %s migrate.migrating=Migrācija no %s ... migrate.migrating_failed=Migrācija no %s neizdevās. +migrate.migrating_failed.error=Kļūda: %s migrate.github.description=Migrēt datus no Github.com vai Github Enterprise servera. migrate.git.description=Migrēt vai spoguļot git datus no Git servisiem migrate.gitlab.description=Migrēt datus no Gitlab.com vai cita Gitlab servera. migrate.gitea.description=Migrēt datus no Gitea.com vai cita Gitea servera. migrate.gogs.description=Migrēt datus no notabug.org vai cita Gogs servera. 
+migrate.migrating_git=Migrē git datus +migrate.migrating_topics=Migrē tēmas +migrate.migrating_milestones=Migrē atskaites punktus +migrate.migrating_labels=Migrē etiķetes +migrate.migrating_releases=Migrē laidienus +migrate.migrating_issues=Migrācijas problēmas +migrate.migrating_pulls=Migrē izmaiņu pieprasījumus mirror_from=spogulis no forked_from=atdalīts no @@ -836,6 +940,7 @@ branch=Atzars tree=Koks clear_ref=`Notīrīt pašreizējo atsauci` filter_branch_and_tag=Filtrēt atzarus vai tagus +find_tag=Atrast tagu branches=Atzari tags=Tagi issues=Problēmas @@ -1105,6 +1210,8 @@ issues.context.edit=Labot issues.context.delete=Dzēst issues.no_content=Vēl nav satura. issues.close_issue=Aizvērt +issues.pull_merged_at=`sapludināja revīziju %[2]s atzarā %[3]s %[4]s` +issues.manually_pull_merged_at=`manuāli sapludināja revīziju %[2]s atzarā %[3]s %[4]s` issues.close_comment_issue=Komentēt un aizvērt issues.reopen_issue=Atvērt atkārtoti issues.reopen_comment_issue=Komentēt un atvērt atkārtoti @@ -1198,6 +1305,7 @@ issues.error_modifying_due_date=Neizdevās izmainīt izpildes termiņu. issues.error_removing_due_date=Neizdevās noņemt izpildes termiņu. issues.push_commit_1=iesūtīja %d revīziju %s issues.push_commits_n=iesūtīja %d revīzijas %s +issues.force_push_codes=`veica piespiedu izmaiņu iesūtīšanu atzarā %[1]s no revīzijas %[2]s uz %[4]s %[6]s` issues.due_date_form=yyyy-mm-dd issues.due_date_form_add=Pievienot izpildes termiņu issues.due_date_form_edit=Labot @@ -1263,6 +1371,9 @@ issues.review.resolved_by=atzīmēja sarunu kā atrisinātu issues.assignee.error=Ne visi atbildīgie tika pievienoti, jo radās neparedzēta kļūda. issues.reference_issue.body=Saturs +compare.compare_base=pamata +compare.compare_head=salīdzināt + pulls.desc=Iespējot izmaiņu pieprasījumus un koda recenzēšanu. pulls.new=Jauns izmaiņu pieprasījums pulls.compare_changes=Jauns izmaiņu pieprasījums @@ -1290,7 +1401,10 @@ pulls.manually_merged_as=Izmaiņu pieprasījums tika sapludināts manuāli ar re pulls.is_closed=Izmaiņu pieprasījums tika aizvērts. pulls.has_merged=Šis izmaiņu pieprasījums tika veiksmīgi sapludināts. pulls.title_wip_desc=`Sāciet virsrakstu ar %s, lai ierobežotu, ka izmaiņu pieprasījums netīšām tiktu sapludināts.` -pulls.cannot_merge_work_in_progress=Šis izmaiņu pieprasījums ir atzīmēts, ka pie tā vēl notiek izstrāde. Noņemiet %s no virsraksta sākuma, kad tas ir pabeigts. +pulls.cannot_merge_work_in_progress=Šis izmaiņu pieprasījums ir atzīmēts, ka pie tā vēl notiek izstrāde. +pulls.still_in_progress=Joprojām notiek izstrāde? +pulls.add_prefix=Pievienot %s prefiksu +pulls.remove_prefix=Noņemt %s prefiksu pulls.data_broken=Izmaiņu pieprasījums ir bojāts, jo dzēsta informācija no atdalītā repozitorija. pulls.files_conflicted=Šīs izmaiņu pieprasījuma izmaiņas konfliktē ar mērķa atzaru. pulls.is_checking=Notiek konfliktu pārbaude, mirkli uzgaidiet un atjaunojiet lapu. @@ -1516,6 +1630,15 @@ settings.hooks=Tīmekļa āķi settings.githooks=Git āķi settings.basic_settings=Pamatiestatījumi settings.mirror_settings=Spoguļa iestatījumi +settings.mirror_settings.docs=Konfigurējiet projektu, lai automātiski iesūtītu un/vai saņemtu izmaiņas uz/no cita repozitorija. Atzari, tagi un revīzijas tiks automātiski sinhronizētas. Kā spoguļot repozitorijus? 
+settings.mirror_settings.mirrored_repository=Spoguļotais repozitorijs +settings.mirror_settings.direction=Virziens +settings.mirror_settings.direction.pull=Izmaiņu saņemšana +settings.mirror_settings.direction.push=Izmaiņu nosūtīšana +settings.mirror_settings.last_update=Pēdējās izmaiņas +settings.mirror_settings.push_mirror.none=Nav konfigurēts iesūtīšanas spogulis +settings.mirror_settings.push_mirror.remote_url=Git attālinātā repozitorija URL +settings.mirror_settings.push_mirror.add=Pievienot iesūtīšanas spoguli settings.sync_mirror=Sinhronizēt tagad settings.mirror_sync_in_progress=Notiek spoguļa sinhronizācija. Atjaunojiet lapu, lai pārbaudītu atkārtoti, pēc brīža. settings.email_notifications.enable=Iespējot e-pasta paziņojumus @@ -1524,6 +1647,7 @@ settings.email_notifications.disable=Nesūtīt paziņojumus settings.email_notifications.submit=Saglabāt sūtīšanas iestatījumus settings.site=Mājas lapa settings.update_settings=Mainīt iestatījumus +settings.branches.update_default_branch=Atjaunot noklusēto atzaru settings.advanced_settings=Papildu iestatījumi settings.wiki_desc=Iespējot vikivietnes settings.use_internal_wiki=Izmantot iebūvēto vikivietni @@ -1553,6 +1677,7 @@ settings.pulls.allow_rebase_merge_commit=Iespējot pārbāzēšanu sapludinot re settings.pulls.allow_squash_commits=Iespējot saspiešanu sapludinot revīzijas settings.pulls.allow_manual_merge=Iespējot atzīmēt izmaiņu pieprasījumu kā manuāli sapludinātu settings.pulls.enable_autodetect_manual_merge=Iespējot manuālo sapludināšanas noteikšanu (Piezīme: dažos speciālos gadījumos, tas var nostrādāt nekorekti) +settings.pulls.default_delete_branch_after_merge=Pēc noklusējuma dzēst izmaiņu pieprasījuma atzaru pēc sapludināšanas settings.projects_desc=Iespējot repozitorija projektus settings.admin_settings=Administratora iestatījumi settings.admin_enable_health_check=Iespējot veselības pārbaudi (git fsck) šim repozitorijam @@ -1580,6 +1705,7 @@ settings.transfer_form_title=Ievadiet repozitorija nosaukumu, lai apstiprinātu: settings.transfer_in_progress=Pašlaik jau tiek veikta repozitorija īpašnieka maiņa. Atceliet iepriekšējo īpašnieka maiņu, ja vēlaties mainīt uz citu. settings.transfer_notices_1=- Jūs zaudēsiet piekļuvi, ja jaunais īpašnieks ir individuāls lietotājs. settings.transfer_notices_2=- Jūs saglabāsiet piekļuvi, ja jaunais īpašnieks ir organizācija un Jūs esat viens no tās īpašniekiem. +settings.transfer_notices_3=- Ja repozitorijs ir privāts un tas tiks pārsūtīts lietotājam, tad pārliecināties, ka lietotājam ir vismaz skatīšanās tiesības (veiciet nepieciešamās izmaiņas, ja nepieciešams). settings.transfer_owner=Jaunais īpašnieks settings.transfer_perform=Veikt īpašnieka maiņu settings.transfer_started=Šim repozitorijam tiek veikta īpašnieka maiņa un nepieciešams apstiprinājums no "%s" @@ -1703,7 +1829,7 @@ settings.event_pull_request_review_desc=Izmaiņu pieprasījums apstiprināts, no settings.event_pull_request_sync=Izmaiņu pieprasījums sinhronizēts settings.event_pull_request_sync_desc=Izmaiņu pieprasījums sinhronizēts. settings.branch_filter=Atzaru filtrs -settings.branch_filter_desc=Atzaru ierobežojumi izmaiņu iesūtīšanas, zaru izveidošanas vai dzēšanas notikumien, izmantojot, glob izteiksmi. Ja norādīts tukšs vai *, notikumi uz visiem zariem tiks nosūtīti. Skatieties github.com/gobwas/glob pieraksta dokumentāciju. Piemērs: master, {master,release*}. +settings.branch_filter_desc=Atzaru ierobežojumi izmaiņu iesūtīšanas, zaru izveidošanas vai dzēšanas notikumiem, izmantojot, glob šablonu. 
Ja norādīts tukšs vai *, tiks nosūtīti notikumi no visiem zariem. Skatieties github.com/gobwas/glob pieraksta dokumentāciju. Piemērs: master, {master,release*}. settings.active=Aktīvs settings.active_helper=Informācija par notikumiem tiks nosūtīta uz šo tīmekļa āķa URL. settings.add_hook_success=Tīmekļa āķis tika pievienots. @@ -1773,7 +1899,7 @@ settings.dismiss_stale_approvals_desc=Kad tiek iesūtītas jaunas revīzijas, ka settings.require_signed_commits=Pieprasīt parakstītas revīzijas settings.require_signed_commits_desc=Noraidīt iesūtītās izmaiņas šim atzaram, ja tās nav parakstītas vai nav iespējams pārbaudīt. settings.protect_protected_file_patterns=Aizsargāto failu šablons (vairākus var norādīt atdalot ar semikolu '\;'): -settings.protect_protected_file_patterns_desc=Aizsargātie faili, ko nevar mainīt, pat ja lietotājam ir tiesības veidot jaunus, labot vai dzēst failus šajā atzarā. Vairākus šablons ir iespējams norādīt atdalot tos ar semikolu ('\;'). Sīkāka informācija par šabloniem pieejama github.com/gobwas/glob dokumentācijā. Piemēram, .drone.yml, /docs/**/*.txt. +settings.protect_protected_file_patterns_desc=Aizsargātie faili, ko nevar mainīt, pat ja lietotājam ir tiesības veidot jaunus, labot vai dzēst failus šajā atzarā. Vairākus šablons ir iespējams norādīt atdalot tos ar semikolu ('\;'). Sīkāka informācija par šabloniem pieejama github.com/gobwas/glob dokumentācijā. Piemēram, .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Iespējot aizsargāšanu settings.delete_protected_branch=Atspējot aizsargāšanu settings.update_protect_branch_success=Atzara aizsardzība atzaram '%s' tika saglabāta. @@ -1787,10 +1913,21 @@ settings.block_on_official_review_requests_desc=Sapludināšana nebūs iespējam settings.block_outdated_branch=Bloķēt sapludināšanau, ja izmaiņu pieprasījums ir novecojis settings.block_outdated_branch_desc=Sapludināšana nebūs pieejama, ja atzars būs atpalicis no bāzes atzara. settings.default_branch_desc=Norādiet noklusēto repozitorija atzaru izmaiņu pieprasījumiem un koda revīzijām: +settings.default_merge_style_desc=Noklusētais sapludināšanas veids izmaiņu pieprasījumiem: settings.choose_branch=Izvēlieties atzaru… settings.no_protected_branch=Nav neviena aizsargātā atzara. settings.edit_protected_branch=Labot settings.protected_branch_required_approvals_min=Pieprasīto recenziju skaits nevar būt negatīvs. +settings.tags=Tagi +settings.tags.protection=Tagu aizsargāšana +settings.tags.protection.pattern=Tagu šablons +settings.tags.protection.allowed=Atļauts +settings.tags.protection.allowed.users=Atļauts lietotājiem +settings.tags.protection.allowed.teams=Atļauts komandām +settings.tags.protection.allowed.noone=Nevienam +settings.tags.protection.create=Aizsargāt tagus +settings.tags.protection.none=Nav uzstādīta tagu aizsargāšana. +settings.tags.protection.pattern.description=Var izmantot pilnu nosaukumu, glob šablonu vai regulāro izteiksmi, lai aizsargātu vairākus tagus. Detalizētāk var izlasīt tagu aizsargāšanas pamācībā. settings.bot_token=Bota talons settings.chat_id=Tērzēšanas ID settings.matrix.homeserver_url=Mājas servera URL @@ -1804,6 +1941,7 @@ settings.archive.success=Repozitorijs veiksmīgi arhivēts. settings.archive.error=Arhivējot repozitoriju radās neparedzēta kļūda. Pārbaudiet kļūdu žurnālu, lai uzzinātu sīkāk. settings.archive.error_ismirror=Nav iespējams arhivēt spoguļotus repozitorijus. settings.archive.branchsettings_unavailable=Atzaru iestatījumi nav pieejami, ja repozitorijs ir arhivēts. 
+settings.archive.tagsettings_unavailable=Tagu iestatījumi nav pieejami, ja repozitorijs ir arhivēts. settings.unarchive.button=Atcelt arhivāciju settings.unarchive.header=Atcelt repozitorija arhivāciju settings.unarchive.text=Atceļot repozitoriju arhivāciju, tam atkal varēs iesūtīt jaunas izmaiņas, kā arī pieteikt problēmas un veidot izmaiņu pieprasījumus. @@ -1855,6 +1993,7 @@ diff.whitespace_ignore_at_eol=Ignorēt atstarpju izmaiņas rindu beigās diff.stats_desc=%d mainītis faili ar %d papildinājumiem un %d dzēšanām diff.stats_desc_file=%d izmaiņas: %d pievienotas un %d dzēstas diff.bin=Binārs +diff.bin_not_shown=Bināro failu nav iespējams attēlot. diff.view_file=Parādīt failu diff.file_before=Pirms diff.file_after=Pēc @@ -1862,6 +2001,7 @@ diff.file_image_width=Platums diff.file_image_height=Augstums diff.file_byte_size=Izmērs diff.file_suppressed=Failā izmaiņas netiks attēlotas, jo tās ir par lielu +diff.file_suppressed_line_too_long=Faila izmaiņas netiek rādītas, jo viena vai vairākas līnijas ir pārāk garas diff.too_many_files=Daži faili netika attēloti, jo izmaiņu fails ir pārāk liels diff.comment.placeholder=Ievadiet komentāru diff.comment.markdown_info=Tiek atbalstīta formatēšana ar Markdown. @@ -1889,6 +2029,7 @@ release.new_release=Jauns laidiens release.draft=Melnraksts release.prerelease=Pirmsizlaides versija release.stable=Stabila +release.compare=Salīdzināt release.edit=labot release.ahead.commits=%d revīzijas release.ahead.target=no %s kopš laidiena publicēšanas @@ -1915,6 +2056,7 @@ release.deletion_tag_desc=Tiks izdzēsts tags no repozitorija. Repozitorija satu release.deletion_tag_success=Tags tika izdzēsts. release.tag_name_already_exist=Laidiens ar šādu taga nosaukumu jau eksistē. release.tag_name_invalid=Nekorekts taga nosaukums. +release.tag_name_protected=Taga nosaukums ir aizsargāts. release.tag_already_exist=Tags ar šādu nosaukumu jau eksistē. release.downloads=Lejupielādes release.download_count=Lejupielādes: %s @@ -1946,6 +2088,10 @@ branch.restore=Atjaunot atzaru '%s' branch.download=Lejupielādēt atzaru '%s' branch.included_desc=Šis atzars ir daļa no noklusēta atzara branch.included=Iekļauts +branch.create_new_branch=Izveidot jaunu atzaru no atzara: +branch.confirm_create_branch=Izveidot atzaru +branch.new_branch=Izveidot jaunu atzaru +branch.new_branch_from=Izveidot jaunu atzaru no '%s' tag.create_tag=Izveidot tagu %s tag.create_success=Tags '%s' tika izveidots. @@ -1955,6 +2101,9 @@ topic.done=Gatavs topic.count_prompt=Nevar pievienot vairāk kā 25 tēmas topic.format_prompt=Tēmai ir jāsākas ar burtu vai ciparu, tā var saturēt domu zīmes ('-') un var būt līdz 35 simboliem gara. +error.csv.too_large=Nevar attēlot šo failu, jo tas ir pārāk liels. +error.csv.unexpected=Nevar attēlot šo failu, jo tas satur neparedzētu simbolu %d. līnijas %d. kolonnā. +error.csv.invalid_field_count=Nevar attēlot šo failu, jo tas satur nepareizu skaitu ar laukiem %d. līnijā. [org] org_name_holder=Organizācijas nosaukums @@ -2152,6 +2301,8 @@ dashboard.total_gc_time=Kopējais GC izpildes laiks dashboard.total_gc_pause=Kopējais GC izpildes laiks dashboard.last_gc_pause=Pedējās GC izpildes laiks dashboard.gc_times=GC reizes +dashboard.delete_old_actions=Dzēst visas darbības no datu bāzes +dashboard.delete_old_actions.started=Uzsākta visu novecojušo darbību dzēšana no datu bāzes. 
users.user_manage_panel=Lietotāju kontu pārvaldība users.new_account=Izveidot lietotāja kontu @@ -2247,7 +2398,6 @@ auths.host=Resursdators auths.port=Ports auths.bind_dn=Saistīšanas DN auths.bind_password=Saistīšanas parole -auths.bind_password_helper=Brīdinājums: Šī parole tiks glabāta nešifrētā veidā. Ieteicams izmantot kontu ar tikai lasīšanas tiesībām. auths.user_base=Lietotāja pamatnosacījumi auths.user_dn=Lietotāja DN auths.attribute_username=Lietotājvārda atribūts @@ -2278,6 +2428,7 @@ auths.allowed_domains_helper=Atstājiet tukšu, lai atļautu visus domēnus. Lai auths.enable_tls=Iespējot TLS šifrēšanu auths.skip_tls_verify=Izlaist TLS pārbaudi auths.pam_service_name=PAM servisa nosaukums +auths.pam_email_domain=PAM e-pasta domēns (neobligāts) auths.oauth2_provider=OAuth2 pakalpojuma sniedzējs auths.oauth2_icon_url=Ikonas URL auths.oauth2_clientID=Klienta ID (atslēga) @@ -2377,6 +2528,7 @@ config.db_path=Ceļš config.service_config=Pakalpojuma konfigurācija config.register_email_confirm=Reģistrējoties pieprasīt apstiprināt e-pasta adresi config.disable_register=Atspējot lietotāju reģistrāciju +config.allow_only_internal_registration=Atļaut reģistrāciju tikai no Gitea config.allow_only_external_registration=Atļaut reģistrēties tikai ar ārējiem servisiem config.enable_openid_signup=Iespējot reģistrāciju, izmantojot OpenID config.enable_openid_signin=Iespējot OpenID autorizāciju diff --git a/options/locale/locale_ml-IN.ini b/options/locale/locale_ml-IN.ini index 12f066c0f6db..4fb2bdbe8a9c 100644 --- a/options/locale/locale_ml-IN.ini +++ b/options/locale/locale_ml-IN.ini @@ -75,6 +75,7 @@ loading=ലഭ്യമാക്കുന്നു… + [error] [startpage] @@ -249,7 +250,6 @@ openid_connect_desc=തിരഞ്ഞെടുത്ത ഓപ്പൺഐഡ openid_register_title=അംഗത്വമെടുക്കുക openid_register_desc=തിരഞ്ഞെടുത്ത ഓപ്പൺഐഡി യുആർഐ അജ്ഞാതമാണ്. ഇവിടെ നിന്നും ഒരു പുതിയ അക്കൗണ്ടുമായി ബന്ധപ്പെടുത്തുക. openid_signin_desc=നിങ്ങളുടെ OpenID URI നൽകുക. ഉദാഹരണത്തിന്: https://anne.me, bob.openid.org.cn അല്ലെങ്കിൽ gnusocial.net/carry. -disable_forgot_password_mail=അക്കൗണ്ട് വീണ്ടെടുക്കൽ പ്രവർത്തനരഹിതമാണ്. നിങ്ങളുടെ സൈറ്റ് അഡ്മിനിസ്ട്രേറ്ററുമായി ബന്ധപ്പെടുക. email_domain_blacklisted=നിങ്ങളുടെ ഇമെയിൽ വിലാസത്തിൽ രജിസ്റ്റർ ചെയ്യാൻ കഴിയില്ല. authorize_application=അപ്ലിക്കേഷനു് അംഗീകാരം നല്കുക authorize_application_created_by=%s സൃഷ്‌ടിച്ച അപ്ലിക്കേഷൻ ആണ്. @@ -259,11 +259,18 @@ authorization_failed=അംഗീകാരം നല്‍കുന്നതി authorization_failed_desc=അസാധുവായ ഒരു അഭ്യർത്ഥന കണ്ടെത്തിയതിനാൽ ഞങ്ങൾ അംഗീകാരം പരാജയപ്പെടുത്തി. ദയവായി നിങ്ങൾ അംഗീകരിക്കാൻ ശ്രമിച്ച അപ്ലിക്കേഷന്റെ പരിപാലകനുമായി ബന്ധപ്പെടുക. [mail] + activate_account=നിങ്ങളുടെ അക്കൗണ്ട് സജീവമാക്കുക + activate_email=ഇമെയില്‍ വിലാസം സ്ഥിരീകരിയ്ക്കുക + +register_notify=ഗിറ്റീയിലേയ്ക്കു് സ്വാഗതം + reset_password=നിങ്ങളുടെ അക്കൗണ്ട് വീണ്ടെടുക്കുക + register_success=രജിസ്ട്രേഷൻ വിജയകരം -register_notify=ഗിറ്റീയിലേയ്ക്കു് സ്വാഗതം + + @@ -443,7 +450,6 @@ add_new_key=SSH കീ ചേർക്കുക add_new_gpg_key=GPG കീ ചേർക്കുക ssh_key_been_used=ഈ SSH കീ ഇതിനകം ചേർത്തു. gpg_key_id_used=സമാന ഐഡിയുള്ള ഒരു പൊതു ജിപിജി കീ ഇതിനകം നിലവിലുണ്ട്. -gpg_no_key_email_found=നിങ്ങളുടെ അക്കൗണ്ടുമായി ബന്ധപ്പെട്ട ഏതെങ്കിലും ഇമെയിൽ വിലാസത്തിൽ ഈ GPG കീ ഉപയോഗിക്കാൻ കഴിയില്ല. 
subkeys=സബ് കീകള്‍
key_id=കീ ഐഡി
key_name=കീയുടെ പേരു്
@@ -564,6 +570,7 @@ email_notifications.onmention=ഇ-മെയിൽ പരാമര്‍ശിച
email_notifications.disable=ഇമെയിൽ അറിയിപ്പുകൾ അപ്രാപ്തമാക്കുക
email_notifications.submit=ഇ-മെയില്‍ മുൻഗണനകള്‍
+
[repo]
owner=ഉടമസ്ഥന്‍
repo_name=കലവറയുടെ പേരു്
@@ -611,7 +618,6 @@ archive.pull.nocomment=ഈ കലവറ ചരിത്രപരമായി ന
form.name_reserved='%s' എന്ന കലവറയുടെ പേരു് മറ്റാവശ്യങ്ങള്‍ക്കായി നീക്കിവച്ചിരിക്കുന്നു.
form.name_pattern_not_allowed=കലവറനാമത്തിൽ '%s' എന്ന ശ്രേണി അനുവദനീയമല്ല.
-need_auth=ക്ലോൺ അംഗീകാരിയ്ക്കുക
migrate_items=മൈഗ്രേഷൻ ഇനങ്ങൾ
migrate_items_wiki=വിക്കി
migrate_items_milestones=നാഴികക്കല്ലുകള്‍
@@ -743,6 +749,7 @@ issues.dependency.add_error_cannot_create_circular=രണ്ട് ഇഷ്യ
issues.dependency.add_error_dep_not_same_repo=രണ്ട് പ്രശ്നങ്ങളും ഒരേ കലവറയിലേതു് ആയിരിക്കണം.
+
; %[2]s
%[3]s
    diff --git a/options/locale/locale_nl-NL.ini b/options/locale/locale_nl-NL.ini index df3d46d06243..551a05ed0df1 100644 --- a/options/locale/locale_nl-NL.ini +++ b/options/locale/locale_nl-NL.ini @@ -15,6 +15,7 @@ page=Pagina template=Sjabloon language=Taal notifications=Meldingen +active_stopwatch=Actieve Tijd Tracker create_new=Maken… user_profile_and_more=Profiel en instellingen… signed_in_as=Aangemeld als @@ -75,6 +76,7 @@ pull_requests=Pull requests issues=Kwesties milestones=Mijlpalen +ok=OK cancel=Annuleren save=Opslaan add=Toevoegen @@ -86,9 +88,12 @@ write=Schrijf preview=Voorbeeld loading=Laden… +step1=Stap 1: +step2=Stap 2: error404=De pagina die u probeert te bereiken bestaat niet of u bent niet gemachtigd om het te bekijken. + [error] occurred=Er is een fout opgetreden report_message=Als je zeker weet dat dit een Gitea bug is, zoek dan naar een issue op GitHub en open zo nodig een nieuw issue. @@ -201,6 +206,7 @@ default_enable_timetracking=Tijdregistratie standaard inschakelen default_enable_timetracking_popup=Tijdsregistratie voor nieuwe repositories standaard inschakelen. no_reply_address=Verborgen e-maildomein no_reply_address_helper=Domeinnaam voor gebruikers met een verborgen e-mailadres. Bijvoorbeeld zal de gebruikersnaam 'joe' in Git worden geregistreerd als 'joe@noreply.example.org' als het verborgen email domein is ingesteld op 'noreply.example.org'. +password_algorithm=Wachtwoord Hash Algoritme [home] uname_holder=Gebruikersnaam of e-mailadres @@ -214,6 +220,7 @@ my_mirrors=Mijn kopieën view_home=Bekijk %s search_repos=Zoek een repository… filter=Andere filters +filter_by_team_repositories=Filter op team repositories show_archived=Gearchiveerd show_both_archived_unarchived=Toont zowel gearchiveerd als niet-gearchiveerd @@ -288,7 +295,6 @@ openid_connect_desc=De gekozen OpenID-URI is onbekend. Koppel het aan een nieuw openid_register_title=Nieuw account aanmaken openid_register_desc=De gekozen OpenID-URI is onbekend. Koppel het aan een nieuw account hier. openid_signin_desc=Geef uw OpenID-URI. Bijvoorbeeld: https://anne.me, bob.openid.org.cn of gnusocial.net/carry. -disable_forgot_password_mail=Accountherstel is uitgeschakeld. Neem contact op met de beheerder van uw site. email_domain_blacklisted=Je kan je niet registreren met dit e-mailadres. authorize_application=Autoriseer applicatie authorize_redirect_notice=U wordt doorgestuurd naar %s als u deze toepassing toestaat. @@ -302,14 +308,26 @@ password_pwned=Het gekozen wachtwoord staat op een kopie zijn @@ -1191,6 +1210,7 @@ issues.review.un_resolve_conversation=Gesprek niet oplossen issues.review.resolved_by=markeerde dit gesprek als opgelost issues.assignee.error=Niet alle aangewezen personen zijn toegevoegd vanwege een onverwachte fout. + pulls.desc=Schakel pull-aanvragen en code-beoordelingen in. pulls.new=Nieuwe Pull aanvraag pulls.compare_changes=Nieuwe pull-aanvraag @@ -1215,7 +1235,6 @@ pulls.merged_as=De pull request is samengevoegd als Start de titel met %s om te voorkomen dat deze pull-aanvraag per ongeluk wordt samengevoegd.` -pulls.cannot_merge_work_in_progress=Deze pull-aanvraag is als "work in progress" gemarkeerd. Verwijder de %s-prefix van de titel zodra hij klaar is pulls.data_broken=Deze pull-aanvraag is ongeldig wegens missende fork-informatie. pulls.files_conflicted=Dit pull request heeft wijzigingen die strijdig zijn met de doel branch. pulls.is_checking=Controle op samenvoegingsconflicten is nog bezig. Probeer later nog een keer. 
@@ -1606,7 +1625,6 @@ settings.event_pull_request_review_desc=Pull request goedgekeurd, afgewezen of r settings.event_pull_request_sync=Pull request gesynchroniseerd settings.event_pull_request_sync_desc=Pull request gesynchroniseerd. settings.branch_filter=Branch filter -settings.branch_filter_desc=Branch whitelist voor push, branch creatie en branch verwijdering events, gespecificeerd als glob patroon. Indien leeg of * worden events voor alle branches gerapporteerd. Zie github.com/gobwas/glob documentatie voor syntaxis. Voorbeelden: master, {master,release*}. settings.active=Actief settings.active_helper=Informatie over geactiveerde gebeurtenissen wordt naar deze webhook URL gestuurd. settings.add_hook_success=De webhook is toegevoegd. @@ -1675,7 +1693,6 @@ settings.dismiss_stale_approvals_desc=Wanneer nieuwe commits die de inhoud van h settings.require_signed_commits=Ondertekende Commits vereisen settings.require_signed_commits_desc=Weiger pushes naar deze branch als deze niet ondertekend of niet verifieerbaar is. settings.protect_protected_file_patterns=Beschermde bestandspatronen (gescheiden door een puntkomma '\;'): -settings.protect_protected_file_patterns_desc=Beschermde bestanden die niet direct gewijzigd mogen worden, zelfs als de gebruiker het recht heeft om bestanden in deze branch toe te voegen, te bewerken of te verwijderen. Meerdere patronen kunnen worden gescheiden met een puntkomma ('\;'). Zie github.com/gobwas/glob documentatie voor patroon syntaxis. Voorbeelden: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Bescherming aanzetten settings.delete_protected_branch=Bescherming uitzetten settings.update_protect_branch_success=Branch bescherming voor branch '%s' is bijgewerkt. @@ -2117,7 +2134,6 @@ auths.host=Host auths.port=Poort auths.bind_dn=Binden DN auths.bind_password=Bind wachtwoord -auths.bind_password_helper=Waarschuwing: Dit wachtwoord wordt opgeslagen in platte tekst. Indien mogelijk gebruik dan een alleen-lezen account. auths.user_base=User Search Base auths.user_dn=User DN auths.attribute_username=Gebruikersnaam attribuut diff --git a/options/locale/locale_pl-PL.ini b/options/locale/locale_pl-PL.ini index ae3c5595f4b5..4b243de3cf46 100644 --- a/options/locale/locale_pl-PL.ini +++ b/options/locale/locale_pl-PL.ini @@ -87,6 +87,7 @@ loading=Ładowanie… error404=Strona, do której próbujesz dotrzeć nie istnieje lub nie jesteś autoryzowany aby go zobaczyć. + [error] occurred=Wystąpił błąd report_message=Jeśli jesteś pewien, że jest to błąd w Gitea, poszukaj problemu na GitHub i w razie potrzeby otwórz nowe zgłoszenie. @@ -286,7 +287,6 @@ openid_connect_desc=Wybrany URI OpenID jest nieznany. Powiąż go z nowym kontem openid_register_title=Stwórz nowe konto openid_register_desc=Wybrany URI OpenID jest nieznany. Powiąż go z nowym kontem w tym miejscu. openid_signin_desc=Wpisz swój URI OpenID. Na przykład: https://anne.me, bob.openid.org.cn or gnusocial.net/carry. -disable_forgot_password_mail=Odzyskiwanie konta jest wyłączone. Skontaktuj się z administratorem strony. email_domain_blacklisted=Nie możesz zarejestrować się za pomocą tego adresu e-mail. authorize_application=Autoryzuj aplikację authorize_redirect_notice=Zostaniesz przekierowany(-a) do %s, jeśli autoryzujesz tę aplikację. 
@@ -298,11 +298,18 @@ authorization_failed_desc=Autoryzacja nie powiodła się ze względu na niewła sspi_auth_failed=Uwierzytelnianie SSPI nie powiodło się [mail] + activate_account=Aktywuj swoje konto + activate_email=Potwierdź swój adres e-mail + +register_notify=Witamy w Gitea + reset_password=Odzyskaj swoje konto + register_success=Rejestracja powiodła się -register_notify=Witamy w Gitea + + @@ -503,7 +510,6 @@ add_new_gpg_key=Dodaj klucz GPG key_content_gpg_placeholder=Zaczyna się od '-----BEGIN PGP PUBLICZNEJ BLOKI KLUCZOWEJ PGP---' ssh_key_been_used=Ten klucz SSH został już dodany do tego serwera. gpg_key_id_used=Publiczny klucz GPG z tym ID już istnieje. -gpg_no_key_email_found=Tego klucza GPG nie można używać z żadnym adresem e-mail powiązanym z Twoim kontem. subkeys=Podklucze key_id=ID klucza key_name=Nazwa klucza @@ -625,6 +631,7 @@ email_notifications.onmention=Wyślij wiadomość e-mail wyłącznie przy wzmian email_notifications.disable=Wyłącz powiadomienia e-mail email_notifications.submit=Ustaw preferencje wiadomości e-mail + [repo] owner=Właściciel repo_name=Nazwa repozytorium @@ -663,7 +670,6 @@ mirror_prune_desc=Usuń przestarzałe odwołania do zdalnych śledzeń mirror_interval=Przedział czasowy dla tworzenia kopii lustrzanej (prawidłowe jednostki czasu to 'h' (godziny), 'm', 's'). 0, aby wyłączyć automatyczną synchronizację. mirror_interval_invalid=Interwał lustrzanej kopii jest niepoprawny. mirror_address=Sklonuj z adresu URL -mirror_address_desc=Wpisz wymagane dane uwierzytelnienia w sekcji Autoryzacja klonowania. mirror_address_url_invalid=Podany adres URL jest niewłaściwy. Musisz poprawnie escape'ować wszystkie jego elementy. mirror_address_protocol_invalid=Podany adres URL jest niewłaściwy. Tylko z http(s):// lub git:// można utworzyć kopie lustrzane. mirror_last_synced=Ostatnio zsynchronizowano @@ -702,7 +708,6 @@ archive.pull.nocomment=To repozytorium jest zarchiwizowane. Nie możesz komentow form.name_reserved=Nazwa repozytorium „%s” jest zarezerwowana. form.name_pattern_not_allowed=Wzór "%s" nie jest dozwolony w nazwie repozytorium. -need_auth=Autoryzacja klonowania migrate_items=Składniki migracji migrate_items_wiki=Wiki migrate_items_milestones=Kamienie milowe @@ -1128,6 +1133,7 @@ issues.review.un_resolve_conversation=Oznacz dyskusję jako nierozstrzygniętą issues.review.resolved_by=oznaczył(-a) tę rozmowę jako rozwiązaną issues.assignee.error=Nie udało się dodać wszystkich wybranych osób do przypisanych przez nieoczekiwany błąd. + pulls.desc=Włącz Pull Requesty i recenzjonowanie kodu. pulls.new=Nowy Pull Request pulls.compare_changes=Nowy Pull Request @@ -1152,7 +1158,6 @@ pulls.merged_as=Pull Request został scalony jako Poprzedź tytuł przy pomocy %s, aby zapobiec przypadkowemu scaleniu tego Pull Requesta.` -pulls.cannot_merge_work_in_progress=Ten Pull Request został oznaczony jako praca w toku. Usuń prefiks %s z tytułu, kiedy będzie już gotowy. pulls.data_broken=Ten Pull Request jest uszkodzony ze względu na brakujące informacje o forku. pulls.files_conflicted=Ten Pull Request zawiera zmiany konfliktujące z docelową gałęzią. pulls.is_checking=Sprawdzanie konfliktów ze scalaniem w toku. Spróbuj ponownie za chwilę. @@ -1520,7 +1525,6 @@ settings.event_pull_request_review_desc=Pull request zatwierdzony, odrzucony lub settings.event_pull_request_sync=Pull Request Zsynchronizowany settings.event_pull_request_sync_desc=Pull request zsynchronizowany. 
settings.branch_filter=Filtr gałęzi -settings.branch_filter_desc=Biała lista gałęzi dla przepychania, tworzenia i usuwania gałęzi, określona jako wzorzec glob. Jeśli pusta, lub *, zdarzenia dla wszystkich gałęzi są wyświetlane. Sprawdź dokumentację github.com/gobwas/glob dla składni. Przykładowo: master, {master,release*}. settings.active=Aktywne settings.active_helper=Informacja o wywołanych wydarzeniach będzie przesłana do tego adresu URL Webhooka. settings.add_hook_success=Webhook został dodany. @@ -1589,7 +1593,6 @@ settings.dismiss_stale_approvals_desc=Kiedy nowe commity zmieniające zawartoś settings.require_signed_commits=Wymagaj podpisanych commitów settings.require_signed_commits_desc=Odrzucaj zmiany wypychane do tej gałęzi, jeśli nie są podpisane, lub są niemożliwe do zweryfikowania. settings.protect_protected_file_patterns=Wzory chronionych plików (rozdzielone średnikiem '\;'): -settings.protect_protected_file_patterns_desc=Chronione pliki, które nie mogą być zmienione bezpośrednio, nawet jeśli użytkownik ma uprawnienia do dodawania, edytowania lub usuwania plików w tej gałęzi. Wzorce można rozdzielić za pomocą średnika ('\;'). Zobacz dokumentację github.com/gobwas/glob dla składni wzorca. Przykłady: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Włącz ochronę settings.delete_protected_branch=Wyłącz ochronę settings.update_protect_branch_success=Ochrona gałęzi dla gałęzi "%s" została zaktualizowana. @@ -2026,7 +2029,6 @@ auths.host=Serwer auths.port=Port auths.bind_dn=DN powiązania auths.bind_password=Hasło Bind -auths.bind_password_helper=Uwaga: To hasło będzie przechowywane w czystym tekście. Użyj konta "tylko do odczytu", jeśli to możliwe. auths.user_base=Baza wyszukiwania auths.user_dn=DN użytkownika auths.attribute_username=Atrybut nazwy użytkownika diff --git a/options/locale/locale_pt-BR.ini b/options/locale/locale_pt-BR.ini index 28da2491becc..e95b388df08e 100644 --- a/options/locale/locale_pt-BR.ini +++ b/options/locale/locale_pt-BR.ini @@ -93,6 +93,7 @@ step2=Passo 2: error404=A página que você está tentando acessar não existe ou você não está autorizado a visualizá-la. + [error] occurred=Ocorreu um erro report_message=Se você tem certeza de que se trata de um bug do Gitea, por favor, procure a issue no GitHub e abra novas issues se necessário. @@ -298,7 +299,6 @@ openid_connect_desc=O URI do OpenID escolhido é desconhecido. Associe-o com uma openid_register_title=Criar uma nova conta openid_register_desc=O URI do OpenID escolhido é desconhecido. Associe-o com uma nova conta aqui. openid_signin_desc=Digite a URI do seu OpenID. Por exemplo: https://anne.me, bob.openid.org.cn ou gnusocial.net/carry. -disable_forgot_password_mail=Recuperação de conta está desativada. Por favor, contate o administrador do servidor. email_domain_blacklisted=Você não pode se cadastrar com seu endereço de e-mail. authorize_application=Autorizar aplicativo authorize_redirect_notice=Você será redirecionado para %s se você autorizar este aplicativo. @@ -312,11 +312,18 @@ password_pwned=A senha escolhida está em uma lista de espelho @@ -1212,6 +1217,7 @@ issues.review.show_outdated=Mostrar desatualizado issues.review.hide_outdated=Ocultar desatualizado issues.assignee.error=Nem todos os responsáveis foram adicionados devido a um erro inesperado. + pulls.desc=Habilitar pull requests e revisões de código. 
pulls.new=Novo pull request pulls.compare_changes=Novo pull request @@ -1236,7 +1242,6 @@ pulls.merged_as=O pull request teve merge aplicado como Inicie o título com o prefixo %s para prevenir o merge do pull request até que o mesmo esteja pronto.` -pulls.cannot_merge_work_in_progress=Este pull request está marcado como um trabalho em andamento. Remova o prefixo %s do título quando estiver pronto pulls.data_broken=Este pull request está quebrado devido a falta de informação do fork. pulls.files_conflicted=Este pull request tem alterações conflitantes com o branch de destino. pulls.is_checking=Verificação de conflitos do merge está em andamento. Tente novamente em alguns momentos. @@ -1554,7 +1559,6 @@ settings.event_issue_comment=Comentário da issue settings.event_issue_comment_desc=Comentário da issue criado, editado ou excluído. settings.event_pull_request=Pull request settings.branch_filter=Filtro de branch -settings.branch_filter_desc=Controle de permissão de push, eventos de criação e exclusão de branch, especificados como padrão glob. Se vazio ou *, eventos para todos os branches serão relatados. Veja github.com/gobwas/glob documentação para sintaxe. Exemplos: master, {master,release*}. settings.active=Ativo settings.active_helper=Informações sobre eventos disparados serão enviadas para esta URL do webhook. settings.add_hook_success=O webhook foi adicionado. @@ -2015,7 +2019,6 @@ auths.host=Servidor auths.port=Porta auths.bind_dn=Vincular DN auths.bind_password=Vincular senha -auths.bind_password_helper=Atenção: Esta senha é armazenada em texto sem formatação. Se possível, use uma conta de somente leitura. auths.user_base=Base de pesquisa do usuário auths.user_dn=Usuário do DN auths.attribute_username=Atributo nome de usuário @@ -2039,6 +2042,7 @@ auths.allowed_domains_helper=Deixe em branco para permitir todos os domínios. S auths.enable_tls=Habilitar Criptografia TLS auths.skip_tls_verify=Pular verificação de TLS auths.pam_service_name=Nome de Serviço PAM +auths.pam_email_domain=Domínio de e-mail do PAM (opcional) auths.oauth2_provider=Provedor OAuth2 auths.oauth2_clientID=ID do cliente (chave) auths.oauth2_clientSecret=Senha do cliente diff --git a/options/locale/locale_pt-PT.ini b/options/locale/locale_pt-PT.ini index 043c133838b5..24b54a918945 100644 --- a/options/locale/locale_pt-PT.ini +++ b/options/locale/locale_pt-PT.ini @@ -83,6 +83,7 @@ add=Adicionar add_all=Adicionar tudo remove=Remover remove_all=Remover tudo +edit=Editar write=Escrever preview=Pré-visualizar @@ -91,11 +92,16 @@ loading=Carregando… step1=Passo 1: step2=Passo 2: +error=Erro error404=A página que pretende aceder não existe ou não tem autorização para a ver. +never=Nunca + [error] occurred=Ocorreu um erro report_message=Se tiver certeza de que se trata de um erro do Gitea, por favor, procure a questão no GitHub e abra uma nova questão, se necessário. +missing_csrf=Pedido inválido: não há código CSRF +invalid_csrf=Pedido inválido: código CSRF inválido [startpage] app_desc=Um serviço Git auto-hospedado e fácil de usar @@ -193,7 +199,7 @@ sqlite3_not_available=Esta versão do Gitea não suporta o SQLite3. 
Descarregue invalid_db_setting=As configurações da base de dados são inválidas: %v invalid_repo_path=A localização base dos repositórios é inválida: %v run_user_not_match=O nome de utilizador para 'executar como' não é o nome de utilizador corrente: %s → %s -save_config_failed=Falha ao guardar a configuração: %v +save_config_failed=Falhou ao guardar a configuração: %v invalid_admin_setting=A configuração da conta de administrador é inválida: %v install_success=Bem-vindo(a)! Obrigado por escolher o Gitea. Divirta-se e aproveite! invalid_log_root_path=A localização dos registos é inválida: %v @@ -279,7 +285,7 @@ reset_password_helper=Recuperar conta reset_password_wrong_user=Tem conta iniciada como %s, mas a ligação de recuperação de conta é para %s password_too_short=O tamanho da senha não pode ser inferior a %d caracteres. non_local_account=Os utilizadores não-locais não podem alterar a sua senha através da interface web do Gitea. -verify=Verificar +verify=Validar scratch_code=Código de recuperação use_scratch_code=Usar um código de recuperação twofa_scratch_used=Você usou o seu código de recuperação. Foi reencaminhado para a página de configurações da autenticação em dois passos para poder remover o registo do seu dispositivo ou gerar um novo código de recuperação. @@ -299,7 +305,8 @@ openid_connect_desc=O URI do OpenID escolhido é desconhecido. Associe-o a uma n openid_register_title=Criar uma conta nova openid_register_desc=O URI do OpenID escolhido é desconhecido. Associe-o a uma nova conta aqui. openid_signin_desc=Insira o seu URI OpenID. Por exemplo: https://maria.me, manuel.openid.org.cn ou gnusocial.net/antonio. -disable_forgot_password_mail=A recuperação de conta está desabilitada. Entre em contacto com o administrador do sítio. +disable_forgot_password_mail=A recuperação de conta está desabilitada porque não foi definido o email. Entre em contacto com o administrador do sítio. +disable_forgot_password_mail_admin=A recuperação de conta só está disponível quando o email está configurado. Por favor, configure o email para permitir a recuperação de conta. email_domain_blacklisted=Não pode fazer um registo com o seu endereço de email. authorize_application=Autorizar aplicação authorize_redirect_notice=Irá ser reencaminhado para %s se autorizar esta aplicação. @@ -313,14 +320,64 @@ password_pwned=A senha utilizada está numa defina a sua senha primeiro. +reset_password=Recupere a sua conta +reset_password.title=%s, você pediu para recuperar a sua conta +reset_password.text=Por favor clique na seguinte ligação para recuperar a sua conta em %s: +register_success=Inscrição bem sucedida +issue_assigned.pull=@%[1]s atribuiu-lhe o pedido de integração %[2]s no repositório %[3]s. +issue_assigned.issue=@%[1]s atribuiu-lhe a questão %[2]s no repositório %[3]s. + +issue.x_mentioned_you=@%s mencionou a si: +issue.action.force_push=%[1]s forçou o envio de %[2]s de %[3]s para %[4]s. +issue.action.push_1=@%[1]s enviou %[3]d cometimento(s) para o ramo %[2]s +issue.action.push_n=@%[1]s enviou %[3]d cometimentos para o ramo %[2]s +issue.action.close=@%[1]s fechou #%[2]d. +issue.action.reopen=@%[1]s reabriu #%[2]d. +issue.action.merge=@%[1]s integrou #%[2]d no ramo %[3]s. +issue.action.approve=@%[1]s aprovou este pedido de integração. +issue.action.reject=@%[1]s solicitou modificações sobre este pedido de integração. +issue.action.review=@%[1]s fez um comentário sobre este pedido de integração. +issue.action.review_dismissed=@%[1]s descartou a última revisão de %[2]s sobre este pedido de integração. 
+issue.action.ready_for_review=@%[1]s marcou este pedido de integração como estando pronto para revisão. +issue.action.new=@%[1]s criou #%[2]d. +issue.in_tree_path=Em %s: + +release.new.subject=%s em %s lançado +release.new.text=@%[1]s lançou %[2]s em %[3]s +release.title=Título: %s +release.note=Nota: +release.downloads=Descargas: +release.download.zip=Código fonte (ZIP) +release.download.targz=Código fonte (TAR.GZ) + +repo.transfer.subject_to=%s gostaria de transferir "%s" para %s +repo.transfer.subject_to_you=%s gostaria de transferir "%s" para si +repo.transfer.to_you=você +repo.transfer.body=Para o aceitar ou rejeitar visite %s, ou ignore-o, simplesmente. + +repo.collaborator.added.subject=%s adicionou você a %s +repo.collaborator.added.text=Foi adicionado(a) como colaborador(a) do repositório: [modal] yes=Sim @@ -348,7 +405,7 @@ TreeName=Caminho do ficheiro Content=Conteúdo SSPISeparatorReplacement=Separador -SSPIDefaultLanguage=Idioma padrão +SSPIDefaultLanguage=Idioma predefinido require_error=` não pode estar em branco.` alpha_dash_error=` deve conter apenas caracteres alfanuméricos, hífen ('-') e sublinhado ('_').` @@ -361,6 +418,7 @@ email_error=` não é um endereço de email válido.` url_error=` não é um URL válido.` include_error=` tem que conter o texto '%s'.` glob_pattern_error=` o padrão glob é inválido: %s.` +regex_pattern_error=` o padrão regex é inválido: %s.` unknown_error=Erro desconhecido: captcha_incorrect=O código CAPTCHA está errado. password_not_match=As senhas não coincidem. @@ -396,11 +454,11 @@ team_not_exist=A equipa não existe. last_org_owner=Não pode remover o último utilizador da equipa 'proprietários'. Tem que haver pelo menos um proprietário numa organização. cannot_add_org_to_team=Uma organização não pode ser adicionada como membro de uma equipa. -invalid_ssh_key=Não é possível verificar sua chave SSH: %s -invalid_gpg_key=Não é possível verificar sua chave GPG: %s +invalid_ssh_key=Não é possível validar a sua chave SSH: %s +invalid_gpg_key=Não é possível validar a sua chave GPG: %s invalid_ssh_principal=Protagonista inválido: %s -unable_verify_ssh_key=Não é possível verificar a chave SSH; verifique novamente se há erros. -auth_failed=Falha na autenticação: %v +unable_verify_ssh_key=Não é possível validar a chave SSH; verifique novamente se há erros. +auth_failed=Falhou a autenticação: %v still_own_repo=A sua conta possui um ou mais repositórios; deve excluí-los ou transferi-los primeiro. still_has_org=A sua conta é membro de uma ou mais organizações; deixe-as primeiro. @@ -409,7 +467,7 @@ org_still_own_repo=Esta organização ainda possui repositórios; deve excluí-l target_branch_not_exist=O ramo de destino não existe. [user] -change_avatar=Alterar seu avatar… +change_avatar=Mude o seu avatar… join_on=Inscreveu-se em repositories=Repositórios activity=Trabalho público @@ -457,7 +515,7 @@ update_theme=Substituir tema update_profile=Modificar perfil update_language_not_found=O idioma '%s' não está disponível. update_profile_success=O seu perfil foi modificado. -change_username=Seu nome de utilizador foi alterado. +change_username=O seu nome de utilizador foi modificado. change_username_prompt=Nota: alterações do nome de utilizador também alteram o URL de sua conta. change_username_redirect_prompt=O antigo nome de utilizador, enquanto não for reivindicado, irá reencaminhar para o novo. 
continue=Continuar @@ -474,8 +532,8 @@ enable_custom_avatar=Usar avatar personalizado choose_new_avatar=Escolher um novo avatar update_avatar=Substituir avatar delete_current_avatar=Eliminar o avatar corrente -uploaded_avatar_not_a_image=O ficheiro enviado não é uma imagem. -uploaded_avatar_is_too_big=O ficheiro enviado excedeu o tamanho máximo. +uploaded_avatar_not_a_image=O ficheiro carregado não é uma imagem. +uploaded_avatar_is_too_big=O ficheiro carregado excedeu o tamanho máximo. update_avatar_success=O seu avatar foi substituído. change_password=Substituir a senha @@ -525,7 +583,7 @@ manage_gpg_keys=Gerir chaves GPG add_key=Adicionar chave ssh_desc=Essas chaves públicas SSH estão associadas à sua conta. As chaves privadas correspondentes permitem acesso total aos seus repositórios. principal_desc=Estes protagonistas de certificados SSH estão associados à sua conta e permitem acesso total aos seus repositórios. -gpg_desc=Essas chaves GPG públicas estão associadas à sua conta. Mantenha as suas chaves privadas seguras, uma vez que elas permitem a verificação dos cometimentos. +gpg_desc=Essas chaves GPG públicas estão associadas à sua conta. Mantenha as suas chaves privadas seguras, uma vez que elas permitem a validação dos cometimentos. ssh_helper=Precisa de ajuda? Dê uma vista de olhos no guia do GitHub para criar as suas próprias chaves SSH ou para resolver problemas comuns que pode encontrar ao usar o SSH. gpg_helper=Precisa de ajuda? Dê uma vista de olhos no guia do GitHub sobre GPG. add_new_key=Adicionar Chave SSH @@ -537,7 +595,20 @@ ssh_key_been_used=Esta chave SSH já tinha sido adicionada ao servidor. ssh_key_name_used=Já existe uma chave SSH com o mesmo nome na sua conta. ssh_principal_been_used=Este protagonista já tinha sido adicionado ao servidor. gpg_key_id_used=Já existe uma chave pública GPG com o mesmo ID. -gpg_no_key_email_found=Esta chave GPG não é utilizável com qualquer endereço de email associado à sua conta. +gpg_no_key_email_found=Esta chave GPG não corresponde a nenhum endereço de email em uso associado à sua conta. No entanto, a chave ainda poderá ser adicionada, se você assinar o código fornecido. +gpg_key_matched_identities=Identidades correspondentes: +gpg_key_matched_identities_long=As identidades incorporadas nesta chave correspondem aos seguintes endereços de email em uso por parte deste utilizador. Os cometimentos que correspondam a estes endereços de email podem ser validados com esta chave. +gpg_key_verified=Chave validada +gpg_key_verified_long=A chave foi validada com um código e pode ser usada para validar cometimentos que correspondam a qualquer dos endereços de email em uso por parte deste utilizador, para além das identidades correspondentes a esta chave. +gpg_key_verify=Validar +gpg_invalid_token_signature=A chave GPG, assinatura ou código fornecidos não correspondem ou então o código expirou. +gpg_token_required=Tem que fornecer uma assinatura para o código abaixo +gpg_token=Código +gpg_token_help=Pode gerar uma assinatura usando o seguinte comando: +gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=Assinatura GPG blindada (com armadura ASCII) +key_signature_gpg_placeholder=Começa com '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=A chave GPG '%s' foi validada. 
subkeys=Subchaves key_id=ID da chave key_name=Nome da chave @@ -551,7 +622,7 @@ ssh_key_deletion=Remover chave SSH gpg_key_deletion=Remover chave GPG ssh_principal_deletion=Remover Protagonista de Certificado SSH ssh_key_deletion_desc=Remover uma chave SSH revoga o acesso dessa chave à sua conta. Quer continuar? -gpg_key_deletion_desc=Remover uma chave GPG retira as verificações feitas sobre os cometimentos assinados com ela. Quer continuar? +gpg_key_deletion_desc=Remover uma chave GPG retira as validações feitas sobre os cometimentos assinados com ela. Quer continuar? ssh_principal_deletion_desc=Remover um Protagonista de Certificado SSH revoga o seu acesso à sua conta. Quer continuar? ssh_key_deletion_success=A chave SSH foi removida. gpg_key_deletion_success=A chave GPG foi removida. @@ -636,7 +707,7 @@ or_enter_secret=Ou insira o segredo: %s then_enter_passcode=E insira o código apresentado na aplicação: passcode_invalid=O código está errado. Tente de novo. twofa_enrolled=A sua conta usa autenticação em dois passos. Guarde o seu código de recuperação (%s) num lugar seguro porque é mostrado somente uma vez! -twofa_failed_get_secret=Falha ao obter o segredo. +twofa_failed_get_secret=Falhou a obtenção do segredo. u2f_desc=Chaves de segurança são dispositivos de hardware contendo chaves criptográficas. Podem ser usadas para autenticação em dois passos. As chaves de segurança têm de suportar o standard FIDO U2F. u2f_require_twofa=A sua conta tem que ter habilitada a autenticação em dois passos para poder usar chaves de segurança. @@ -668,6 +739,14 @@ email_notifications.onmention=Enviar email somente quando mencionado(a) email_notifications.disable=Desabilitar notificações por email email_notifications.submit=Definir preferência do email +visibility=Visibilidade do utilizador +visibility.public=Pública +visibility.public_tooltip=Visível para todos os utilizadores +visibility.limited=Limitada +visibility.limited_tooltip=Visível apenas para utilizadores com sessão iniciada +visibility.private=Privada +visibility.private_tooltip=Visível apenas para membros da organização + [repo] new_repo_helper=Um repositório contém todos os ficheiros do projecto, incluindo o histórico das revisões. Já o tem noutro sítio? Migre o repositório. owner=Proprietário(a) @@ -705,23 +784,30 @@ readme=README readme_helper=Escolha um modelo de ficheiro README. readme_helper_desc=Este é o sítio onde pode escrever uma descrição completa do seu projecto. auto_init=Inicializar repositório (adiciona `.gitignore`, `LICENSE` e `README.md`) -trust_model_helper=Escolha o modelo de confiança para a verificação das assinaturas. As opções são: +trust_model_helper=Escolha o modelo de confiança para a validação das assinaturas. As opções são: trust_model_helper_collaborator=Colaborador: Confiar nas assinaturas dos colaboradores trust_model_helper_committer=Autor do cometimento: Confiar nas assinaturas que correspondem a autores de cometimentos trust_model_helper_collaborator_committer=Colaborador + Autor do cometimento: Confiar nas assinaturas de colaboradores que correspondem ao autor do cometimento trust_model_helper_default=Padrão: Usar o modelo de confiança padrão para esta instalação create_repo=Criar repositório -default_branch=Ramo padrão -default_branch_helper=O ramo padrão é o ramo base para pedidos de integração e cometimentos. +default_branch=Ramo principal +default_branch_helper=O ramo principal é o ramo base para pedidos de integração e cometimentos. 
mirror_prune=Podar mirror_prune_desc=Remover referências obsoletas de seguimento remoto mirror_interval=Intervalo de espelhamento (as unidade de tempo válidas são 'h', 'm' e 's'). O valor zero desabilita a sincronização automática. mirror_interval_invalid=O intervalo do espelhamento não é válido. mirror_address=Clonar a partir do URL -mirror_address_desc=Coloque, na secção de Autorização de Clonagem, as credenciais que, eventualmente, sejam necessárias. +mirror_address_desc=Coloque, na secção de Autorização, as credenciais que, eventualmente, sejam necessárias. mirror_address_url_invalid=O URL fornecido é inválido. Tem que codificar adequadamente todos os componentes do URL. mirror_address_protocol_invalid=O URL fornecido é inválido. Só se pode espelhar a partir de endereços http(s):// ou git://. +mirror_lfs=Armazenamento de Ficheiros Grandes (LFS) +mirror_lfs_desc=Habilitar o espelhamento de dados LFS. +mirror_lfs_endpoint=Destino LFS +mirror_lfs_endpoint_desc=A sincronização irá tentar usar o URL de clonagem para determinar o servidor LFS. Também pode especificar um destino personalizado se os dados do repositório LFS forem armazenados noutro lugar. mirror_last_synced=Última sincronização +mirror_password_placeholder=(inalterada) +mirror_password_blank_placeholder=(não definida) +mirror_password_help=Altere o nome de utilizador para eliminar uma senha armazenada. watchers=Vigilantes stargazers=Fãs forks=Derivações @@ -738,6 +824,7 @@ delete_preexisting_label=Eliminar delete_preexisting=Eliminar ficheiros pré-existentes delete_preexisting_content=Eliminar ficheiros em %s delete_preexisting_success=Eliminados os ficheiros não adoptados em %s +blame_prior=Ver a responsabilização anterior a esta modificação transfer.accept=Aceitar transferência transfer.accept_desc=Transferir para "%s" @@ -755,7 +842,7 @@ desc.internal_template=Modelo interno desc.archived=Arquivado template.items=Itens do modelo -template.git_content=Conteúdo Git (ramo padrão) +template.git_content=Conteúdo Git (ramo principal) template.git_hooks=Automatismos do Git template.git_hooks_tooltip=Neste momento não pode modificar ou remover automatismos do git depois de adicionados. Escolha esta opção somente se confiar no repositório modelo. template.webhooks=Automatismos web @@ -765,7 +852,7 @@ template.issue_labels=Rótulos das questões template.one_item=Tem que escolher pelo menos um item do modelo template.invalid=Tem que escolher um repositório modelo -archive.title=Este repositório está arquivado. Pode ver ficheiros e cloná-lo, mas não pode fazer envios ou lançar questões ou pedidos de integração. +archive.title=Este repositório está arquivado. Pode ver os seus ficheiros e cloná-lo, mas não pode fazer envios para o repositório nem lançar questões ou fazer pedidos de integração. archive.issue.nocomment=Este repositório está arquivado. Não pode comentar nas questões. archive.pull.nocomment=Este repositório está arquivado. Não pode comentar nos pedidos de integração. @@ -774,11 +861,15 @@ form.reach_limit_of_creation_n=Já atingiu o seu limite de %d repositórios. form.name_reserved=O nome de repositório '%s' está reservado. form.name_pattern_not_allowed=O padrão '%s' não é permitido no nome de um repositório. -need_auth=Autorização de clonagem +need_auth=Autorização migrate_options=Opções de migração migrate_service=Serviço de migração migrate_options_mirror_helper=Este repositório irá ser um espelho migrate_options_mirror_disabled=O administrador desabilitou novos espelhos. 
+migrate_options_lfs=Migrar ficheiros LFS +migrate_options_lfs_endpoint.label=Destino LFS +migrate_options_lfs_endpoint.description=A migração irá tentar usar o seu controlo remoto do Git para determinar o servidor LFS. Também pode especificar um destino personalizado se os dados do repositório LFS forem armazenados noutro lugar. +migrate_options_lfs_endpoint.description.local=Um caminho de servidor local também é suportado. migrate_options_lfs_endpoint.placeholder=Deixe em branco para derivar do URL de clonagem migrate_items=Itens da migração migrate_items_wiki=Wiki @@ -796,18 +887,27 @@ migrate.permission_denied=Não está autorizado a importar repositórios locais. migrate.permission_denied_blocked=Não tem permissão para importar a partir de servidores bloqueados. migrate.permission_denied_private_ip=Não tem permissão para importar a partir de IPs privados. migrate.invalid_local_path=O caminho local é inválido. Não existe ou não é uma pasta. +migrate.invalid_lfs_endpoint=O destino LFS não é válido. migrate.failed=A migração falhou: %v migrate.migrate_items_options=É necessário um código de acesso para migrar itens adicionais migrated_from=Migrado de %[2]s migrated_from_fake=Migrado de %[1]s migrate.migrate=Migrar de %s -migrate.migrating=Migrando de %s... +migrate.migrating=Migrando a partir de %s ... migrate.migrating_failed=A migração de %s falhou. -migrate.github.description=Migrando dados do Github.com ou do Github Enterprise. -migrate.git.description=Migrando ou espelhando dados git a partir de serviços Git -migrate.gitlab.description=Migrando dados do GitLab.com ou de um servidor GitLab auto-hospedado. -migrate.gitea.description=Migrando dados do Gitea.com ou de um servidor Gitea auto-hospedado. -migrate.gogs.description=Migrando dados de notabug.com ou de outro servidor Gogs auto-hospedado. +migrate.migrating_failed.error=Erro: %s +migrate.github.description=Migrar dados do Github.com ou do Github Enterprise. +migrate.git.description=Migrar ou espelhar dados git a partir de serviços Git +migrate.gitlab.description=Migrar dados do GitLab.com ou de um servidor GitLab auto-hospedado. +migrate.gitea.description=Migrar dados do Gitea.com ou de um servidor Gitea auto-hospedado. +migrate.gogs.description=Migrar dados de notabug.com ou de outro servidor Gogs auto-hospedado. +migrate.migrating_git=Migrando dados Git +migrate.migrating_topics=Migrando tópicos +migrate.migrating_milestones=Migrando etapas +migrate.migrating_labels=Migrando rótulos +migrate.migrating_releases=Migrando lançamentos +migrate.migrating_issues=Migrando questões +migrate.migrating_pulls=Migrando pedidos de integração mirror_from=espelho de forked_from=derivado de @@ -840,6 +940,7 @@ branch=Ramo tree=Árvore clear_ref=`Apagar a referência vigente` filter_branch_and_tag=Filtrar ramo ou etiqueta +find_tag=Procurar etiqueta branches=Ramos tags=Etiquetas issues=Questões @@ -878,24 +979,24 @@ line=linha lines=linhas editor.new_file=Novo ficheiro -editor.upload_file=Enviar ficheiro +editor.upload_file=Carregar ficheiro editor.edit_file=Editar ficheiro -editor.preview_changes=Pré-visualizar alterações +editor.preview_changes=Pré-visualizar modificações editor.cannot_edit_lfs_files=Ficheiros LFS não podem ser editados na interface web. editor.cannot_edit_non_text_files=Ficheiros binários não podem ser editados na interface da web. editor.edit_this_file=Editar ficheiro editor.this_file_locked=Ficheiro bloqueado -editor.must_be_on_a_branch=Tem que estar num ramo para fazer ou propor alterações neste ficheiro. 
-editor.fork_before_edit=Tem que fazer uma derivação deste repositório para fazer ou propor alterações neste ficheiro. +editor.must_be_on_a_branch=Tem que estar num ramo para fazer ou propor modificações neste ficheiro. +editor.fork_before_edit=Tem que fazer uma derivação deste repositório para fazer ou propor modificações neste ficheiro. editor.delete_this_file=Eliminar ficheiro -editor.must_have_write_access=Tem que ter permissões de escrita para fazer ou propor alterações neste ficheiro. +editor.must_have_write_access=Tem que ter permissões de escrita para fazer ou propor modificações neste ficheiro. editor.file_delete_success=O ficheiro '%s' foi eliminado. editor.name_your_file=Nomeie o seu ficheiro… editor.filename_help=Adicione uma pasta escrevendo o nome dessa pasta seguido de uma barra('/'). Remova uma pasta carregando na tecla de apagar ('←') no início do campo. editor.or=ou editor.cancel_lower=Cancelar -editor.commit_signed_changes=Cometer alterações assinadas -editor.commit_changes=Cometer alterações +editor.commit_signed_changes=Cometer modificações assinadas +editor.commit_changes=Cometer modificações editor.add_tmpl=Adicionar '' editor.add=Adicionar '%s' editor.update=Modificar '%s' @@ -905,7 +1006,7 @@ editor.signoff_desc=Adicionar "Assinado-por" seguido do autor do cometimento no editor.commit_directly_to_this_branch=Cometer imediatamente no ramo %s. editor.create_new_branch=Crie um novo ramo para este cometimento e inicie um pedido de integração. editor.create_new_branch_np=Criar um novo ramo para este cometimento. -editor.propose_file_change=Propor alteração de ficheiro +editor.propose_file_change=Propor modificação do ficheiro editor.new_branch_name_desc=Nome do novo ramo… editor.cancel=Cancelar editor.filename_cannot_be_empty=O nome do ficheiro não pode estar em branco. @@ -921,22 +1022,22 @@ editor.file_changed_while_editing=O conteúdo do ficheiro mudou desde que começ editor.file_already_exists=Já existe um ficheiro com o nome '%s' neste repositório. editor.commit_empty_file_header=Cometer um ficheiro vazio editor.commit_empty_file_text=O ficheiro que está prestes a cometer está vazio. Quer continuar? -editor.no_changes_to_show=Não existem alterações a mostrar. +editor.no_changes_to_show=Não existem modificações para mostrar. editor.fail_to_update_file=Falhou ao modificar/criar o ficheiro '%s'. editor.fail_to_update_file_summary=Mensagem de erro: -editor.push_rejected_no_message=A alteração foi rejeitada pelo servidor sem qualquer mensagem. Verifique os automatismos do Git. -editor.push_rejected=A alteração foi rejeitada pelo servidor. Verifique os automatismos do Git. +editor.push_rejected_no_message=A modificação foi rejeitada pelo servidor sem qualquer mensagem. Verifique os automatismos do Git. +editor.push_rejected=A modificação foi rejeitada pelo servidor. Verifique os automatismos do Git. editor.push_rejected_summary=Mensagem completa de rejeição: editor.add_subdir=Adicionar uma pasta… -editor.unable_to_upload_files=Falha ao enviar ficheiros para '%s' com erro: %v +editor.unable_to_upload_files=Falhou o carregamento de ficheiros para '%s' com o erro: %v editor.upload_file_is_locked=O ficheiro '%s' está bloqueado por %s. -editor.upload_files_to_dir=Enviar ficheiros para '%s' +editor.upload_files_to_dir=Carregar ficheiros para '%s' editor.cannot_commit_to_protected_branch=Não é possível cometer para o ramo protegido '%s'. 
editor.no_commit_to_branch=Não é possível cometer imediatamente para o ramo porque: editor.user_no_push_to_branch=O utilizador não pode enviar para o ramo editor.require_signed_commit=O ramo requer um cometimento assinado -commits.desc=Navegar pelo histórico de alterações no código fonte. +commits.desc=Navegar pelo histórico de modificações no código fonte. commits.commits=Cometimentos commits.no_commits=Não há cometimentos em comum. '%s' e '%s' têm históricos completamente diferentes. commits.search=Procurar cometimentos… @@ -984,7 +1085,7 @@ projects.board.new_title=Novo nome para o quadro projects.board.new_submit=Submeter projects.board.new=Novo quadro projects.board.set_default=Definir como padrão -projects.board.set_default_desc=Definir este painel como padrão para recebimentos e questões sem categoria +projects.board.set_default_desc=Definir este painel como padrão para puxadas e questões não categorizadas projects.board.delete=Eliminar quadro projects.board.deletion_desc=Eliminar um quadro de projecto faz com que todas as questões relacionadas sejam movidas para 'Sem categoria'. Continuar? projects.open=Abrir @@ -996,7 +1097,7 @@ issues.filter_milestones=Filtrar etapa issues.filter_projects=Filtrar projecto issues.filter_labels=Filtrar rótulo issues.filter_reviewers=Filtrar revisor -issues.new=Nova questão +issues.new=Questão nova issues.new.title_empty=O título não pode estar vazio issues.new.labels=Rótulos issues.new.add_labels_title=Aplicar rótulos @@ -1034,7 +1135,7 @@ issues.label_templates.title=Carregar um conjunto predefinido de rótulos issues.label_templates.info=Ainda não existem rótulos. Crie um rótulo com 'Novo rótulo' ou use um conjunto de rótulos predefinido: issues.label_templates.helper=Escolha um conjunto de rótulos issues.label_templates.use=Usar conjunto de rótulos -issues.label_templates.fail_to_load_file=Falha ao carregar o ficheiro modelo de rótulos '%s': %v +issues.label_templates.fail_to_load_file=Falhou ao carregar o ficheiro modelo de rótulos '%s': %v issues.add_label=adicionou o rótulo %s %s issues.add_labels=adicionou os rótulos %s %s issues.remove_label=removeu o rótulo %s %s @@ -1076,8 +1177,8 @@ issues.filter_sort.recentupdate=Modificadas recentemente issues.filter_sort.leastupdate=Modificadas há muito tempo issues.filter_sort.mostcomment=Mais comentadas issues.filter_sort.leastcomment=Menos comentadas -issues.filter_sort.nearduedate=Prazo mais próximo -issues.filter_sort.farduedate=Prazo mais distante +issues.filter_sort.nearduedate=Data de vencimento mais próxima +issues.filter_sort.farduedate=Data de vencimento mais distante issues.filter_sort.moststars=Favorito (decrescente) issues.filter_sort.feweststars=Favorito (crescente) issues.filter_sort.mostforks=Mais derivações @@ -1097,18 +1198,20 @@ issues.opened_by_fake=de %[2]s aberto %[1]s issues.closed_by_fake=de %[2]s fechada %[1]s issues.previous=Anterior issues.next=Seguinte -issues.open_title=Abertas -issues.closed_title=Fechadas +issues.open_title=Aberta +issues.closed_title=Fechada issues.num_comments=%d comentários issues.commented_at=`comentado %s` issues.delete_comment_confirm=Tem a certeza que quer eliminar este comentário? issues.context.copy_link=Copiar ligação issues.context.quote_reply=Citar resposta -issues.context.reference_issue=Criar nova questão referindo esta +issues.context.reference_issue=Criar uma nova questão referindo esta issues.context.edit=Editar issues.context.delete=Eliminar issues.no_content=Ainda não há conteúdo. 
issues.close_issue=Fechar +issues.pull_merged_at=`integrou o cometimento %[2]s no ramo %[3]s %[4]s` +issues.manually_pull_merged_at=`integrou o cometimento %[2]s no ramo %[3]s manualmente %[4]s` issues.close_comment_issue=Comentar e fechar issues.reopen_issue=Reabrir issues.reopen_comment_issue=Comentar e reabrir @@ -1127,7 +1230,7 @@ issues.poster=Autor issues.collaborator=Colaborador(a) issues.owner=Proprietário(a) issues.re_request_review=Voltar a solicitar revisão -issues.is_stale=Houve alterações neste pedido de integração posteriormente a esta revisão +issues.is_stale=Houve modificações neste pedido de integração posteriormente a esta revisão issues.remove_request_review=Remover solicitação de revisão issues.remove_request_review_block=Não é possível remover a solicitação de revisão issues.dismiss_review=Descartar revisão @@ -1169,7 +1272,7 @@ issues.unlock_confirm=Desbloquear issues.lock.notice_1=- Outros utilizadores não podem adicionar novos comentários a esta questão. issues.lock.notice_2=- Você e outros colaboradores com acesso a este repositório ainda podem deixar comentários que outros possam ver. issues.lock.notice_3=- Pode sempre voltar a desbloquear esta questão no futuro. -issues.unlock.notice_1=- Todos poderiam comentar mais uma vez nesta questão. +issues.unlock.notice_1=- Todos poderão voltar a comentar nesta questão. issues.unlock.notice_2=- Pode sempre voltar a bloquear esta questão no futuro. issues.lock.reason=Motivo do bloqueio issues.lock.title=Bloquear diálogo sobre esta questão. @@ -1189,30 +1292,31 @@ issues.add_time=Adicionar tempo manualmente issues.del_time=Eliminar este registo de tempo issues.add_time_short=Adicionar tempo issues.add_time_cancel=Cancelar -issues.add_time_history=`adicionou o tempo gasto %s` -issues.del_time_history=`apagou o tempo gasto %s` +issues.add_time_history=`adicionou tempo gasto nesta questão %s` +issues.del_time_history=`eliminou o tempo gasto nesta questão %s` issues.add_time_hours=Horas issues.add_time_minutes=Minutos issues.add_time_sum_to_small=Não foi inserido qualquer tempo. issues.time_spent_total=Total de tempo gasto -issues.time_spent_from_all_authors=`Tempo total gasto: %s` -issues.due_date=Date limite -issues.invalid_due_date_format=O formato da data limite tem que ser 'aaaa-mm-dd'. -issues.error_modifying_due_date=Falha ao modificar a data limite. -issues.error_removing_due_date=Falha ao remover a data limite. +issues.time_spent_from_all_authors=`Total de tempo gasto: %s` +issues.due_date=Data de vencimento +issues.invalid_due_date_format=O formato da data de vencimento tem que ser 'aaaa-mm-dd'. +issues.error_modifying_due_date=Falhou a modificação da data de vencimento. +issues.error_removing_due_date=Falhou a remoção da data de vencimento. issues.push_commit_1=adicionou %d cometimento %s issues.push_commits_n=adicionou %d cometimentos %s +issues.force_push_codes=`forçou o envio %[1]s de %[2]s para %[4]s %[6]s` issues.due_date_form=yyyy-mm-dd -issues.due_date_form_add=Adicionar data limite +issues.due_date_form_add=Adicionar data de vencimento issues.due_date_form_edit=Editar issues.due_date_form_remove=Remover -issues.due_date_not_writer=Tem que ter permissões de escrita no repositório para poder modificar o prazo de uma questão. -issues.due_date_not_set=Sem data limite definida. 
-issues.due_date_added=adicionou a data limite %s %s -issues.due_date_modified=modificou a data limite de %[2]s para %[1]s %[3]s -issues.due_date_remove=removeu a data limite %s %s +issues.due_date_not_writer=Tem que ter permissões de escrita no repositório para poder modificar a data de vencimento de uma questão. +issues.due_date_not_set=Sem data de vencimento definida. +issues.due_date_added=adicionou a data de vencimento %s %s +issues.due_date_modified=modificou a data de vencimento de %[2]s para %[1]s %[3]s +issues.due_date_remove=removeu a data de vencimento %s %s issues.due_date_overdue=Em atraso -issues.due_date_invalid=A data limite é inválida ou está fora do intervalo permitido. Por favor, use o formato 'aaaa-mm-dd'. +issues.due_date_invalid=A data de vencimento é inválida ou está fora do intervalo permitido. Por favor, use o formato 'aaaa-mm-dd'. issues.dependency.title=Dependências issues.dependency.issue_no_dependencies=Esta questão não tem quaisquer dependências, neste momento. issues.dependency.pr_no_dependencies=Este pedido de integração não tem quaisquer dependências, neste momento. @@ -1241,14 +1345,14 @@ issues.dependency.add_error_dep_exists=A dependência já existe. issues.dependency.add_error_cannot_create_circular=Não pode criar uma dependência onde duas questões se bloqueiam simultaneamente. issues.dependency.add_error_dep_not_same_repo=Ambas as questões têm que estar no mesmo repositório. issues.review.self.approval=Não pode aprovar o seu próprio pedido de integração. -issues.review.self.rejection=Não pode solicitar alterações sobre o seu próprio pedido de integração. -issues.review.approve=aprovou estas alterações %s +issues.review.self.rejection=Não pode solicitar modificações sobre o seu próprio pedido de integração. +issues.review.approve=aprovou estas modificações %s issues.review.comment=reviu %s issues.review.dismissed=descartou a revisão de %s %s issues.review.dismissed_label=Descartada issues.review.left_comment=deixou um comentário -issues.review.content.empty=Tem que deixar um comentário indicando a(s) alteração(ões) solicitada(s). -issues.review.reject=alterações solicitadas %s +issues.review.content.empty=Tem que deixar um comentário indicando a(s) modificação(ões) solicitada(s). +issues.review.reject=modificações solicitadas %s issues.review.wait=foi solicitada para revisão %s issues.review.add_review_request=solicitou revisão de %s %s issues.review.remove_review_request=removeu a solicitação de revisão para %s %s @@ -1267,47 +1371,53 @@ issues.review.resolved_by=marcou este diálogo como estando resolvido issues.assignee.error=Nem todos os responsáveis foram adicionados devido a um erro inesperado. issues.reference_issue.body=Conteúdo +compare.compare_base=base +compare.compare_head=comparar + pulls.desc=Habilitar pedidos de integração e revisão de código. pulls.new=Novo pedido de integração pulls.compare_changes=Novo pedido de integração pulls.compare_changes_desc=Escolha o ramo de destino e o ramo de origem. pulls.compare_base=integrar em -pulls.compare_compare=integrar a partir de +pulls.compare_compare=puxar de pulls.filter_branch=Filtrar ramo pulls.no_results=Não foram encontrados quaisquer resultados. pulls.nothing_to_compare=Estes ramos são iguais. Não há necessidade de criar um pedido de integração. pulls.nothing_to_compare_and_allow_empty_pr=Estes ramos são iguais. Este pedido de integração ficará vazio. 
pulls.has_pull_request=`Já existe um pedido de integração entre estes dois ramos: %[2]s#%[3]d` pulls.create=Criar um pedido de integração -pulls.title_desc=quer integrar %[1]d cometimentos de %[2] em %[3]s -pulls.merged_title_desc=integrou %[1]d cometimentos de %[2] em %[3]s %[4]s +pulls.title_desc=quer integrar %[1]d cometimento(s) do ramo %[2]s no ramo %[3]s +pulls.merged_title_desc=integrou %[1]d cometimento(s) do ramo %[2]s no ramo %[3]s %[4]s pulls.change_target_branch_at=`mudou o ramo de destino de %s para %s %s` pulls.tab_conversation=Diálogo pulls.tab_commits=Cometimentos pulls.tab_files=Ficheiros modificados pulls.reopen_to_merge=Reabra este pedido de integração para executar uma integração. pulls.cant_reopen_deleted_branch=Este pedido de integração não pode ser reaberto porque o ramo foi eliminado. -pulls.merged=Integração executada -pulls.merged_as=A integração constante no pedido foi executada como %[2]s. +pulls.merged=Integrado +pulls.merged_as=A integração foi executada no cometimento %[2]s. pulls.manually_merged=Integrado manualmente -pulls.manually_merged_as=A integração constante neste pedido foi executada manualmente como %[2]s. +pulls.manually_merged_as=A integração foi executada manualmente no cometimento %[2]s. pulls.is_closed=O pedido de integração foi fechado. -pulls.has_merged=A integração constante no pedido foi executada. -pulls.title_wip_desc=`Inicie o título com %s para evitar que o pedido de integração seja executado acidentalmente.` -pulls.cannot_merge_work_in_progress=Este pedido de integração está marcado como um trabalho em andamento. Remova o prefixo %s do título quando estiver pronto +pulls.has_merged=A integração foi executada. +pulls.title_wip_desc=`Inicie o título com %s para evitar que a integração seja executada acidentalmente.` +pulls.cannot_merge_work_in_progress=Este pedido de integração está marcado como trabalho em andamento. +pulls.still_in_progress=Ainda em andamento? +pulls.add_prefix=Adicione o prefixo %s +pulls.remove_prefix=Remover o prefixo %s pulls.data_broken=Este pedido de integração está danificado devido à falta de informação da derivação. -pulls.files_conflicted=Este pedido de integração contém alterações que entram em conflito com o ramo de destino. +pulls.files_conflicted=Este pedido de integração contém modificações que entram em conflito com o ramo de destino. pulls.is_checking=Está em andamento uma verificação de conflitos na integração. Tente novamente daqui a alguns momentos. pulls.is_empty=Este ramo é igual ao ramo de destino. pulls.required_status_check_failed=Algumas das verificações obrigatórias não foram bem sucedidas. pulls.required_status_check_missing=Estão faltando algumas verificações necessárias. pulls.required_status_check_administrator=Uma vez que é administrador, ainda pode realizar a integração deste pedido. pulls.blocked_by_approvals=Este pedido de integração ainda não tem aprovações suficientes. Já foram concedidas %d de um total de%d aprovações. -pulls.blocked_by_rejection=Este pedido de integração tem alterações solicitadas por um revisor oficial. -pulls.blocked_by_official_review_requests=Este Pedido de Integração tem pedidos de revisão oficiais. +pulls.blocked_by_rejection=Este pedido de integração tem modificações solicitadas por um revisor oficial. +pulls.blocked_by_official_review_requests=Este pedido de integração tem pedidos de revisão oficiais. pulls.blocked_by_outdated_branch=Este pedido de integração foi bloqueado por ser obsoleto. 
-pulls.blocked_by_changed_protected_files_1=Este pedido de integração está bloqueado porque altera um ficheiro protegido:
-pulls.blocked_by_changed_protected_files_n=Este pedido de integração está bloqueado porque altera ficheiros protegidos:
+pulls.blocked_by_changed_protected_files_1=Este pedido de integração está bloqueado porque modifica um ficheiro protegido:
+pulls.blocked_by_changed_protected_files_n=Este pedido de integração está bloqueado porque modifica ficheiros protegidos:
 pulls.can_auto_merge_desc=A integração constante neste pedido pode ser executada automaticamente.
 pulls.cannot_auto_merge_desc=A integração constante neste pedido não pode ser executada automaticamente porque existem conflitos.
 pulls.cannot_auto_merge_helper=Faça a integração manualmente para resolver os conflitos.
@@ -1315,18 +1425,18 @@ pulls.num_conflicting_files_1=%d ficheiro em conflito
 pulls.num_conflicting_files_n=%d ficheiros em conflito
 pulls.approve_count_1=%d aprovação
 pulls.approve_count_n=%d aprovações
-pulls.reject_count_1=%d pedido de alteração
-pulls.reject_count_n=%d pedidos de alteração
+pulls.reject_count_1=%d pedido de modificação
+pulls.reject_count_n=%d pedidos de modificação
 pulls.waiting_count_1=%d revisão pendente
 pulls.waiting_count_n=%d revisões pendentes
 pulls.wrong_commit_id=ID do cometimento tem que ser um ID de cometimento no ramo de destino
 pulls.no_merge_desc=A integração constante neste pedido não pode ser executada porque todas as opções de integração do repositório estão desabilitadas.
-pulls.no_merge_helper=Habilite as opções de integração nas configurações do repositório ou faça manualmente a integração constante no pedido.
+pulls.no_merge_helper=Habilite as opções de integração nas configurações do repositório ou faça a integração manualmente.
 pulls.no_merge_wip=A integração constante neste pedido não pode ser executada porque está marcada como sendo trabalho em andamento.
 pulls.no_merge_not_ready=A integração constante neste pedido não pode ser executada. Verifique o estado da revisão e as verificações de estado.
 pulls.no_merge_access=Não tem autorização para executar a integração constante neste pedido.
-pulls.merge_pull_request=Executar a integração constante no pedido
+pulls.merge_pull_request=Executar a integração
 pulls.rebase_merge_pull_request=Mudar a base e integrar
 pulls.rebase_merge_commit_pull_request=Mudar a base e integrar (--no-ff)
 pulls.squash_merge_pull_request=Comprimir e integrar
@@ -1339,11 +1449,11 @@ pulls.merge_conflict_summary=Mensagem de erro
 pulls.rebase_conflict=A integração falhou: Houve um conflito durante a mudança de base do cometimento %[1]s. Dica: Tente uma estratégia diferente
 pulls.rebase_conflict_summary=Mensagem de erro ; %[2]s
%[3]s
-pulls.unrelated_histories=Integração falhada: A cabeça da integração e a base não partilham um histórico comum. Dica: Tente uma estratégia diferente
+pulls.unrelated_histories=A integração falhou: A cabeça da integração e a base não partilham um histórico comum. Dica: Tente uma estratégia diferente
 pulls.merge_out_of_date=Falhou a integração: Enquanto estava a gerar a integração, a base foi modificada. Dica: Tente de novo.
 pulls.push_rejected=A integração falhou: O envio foi rejeitado. Reveja os automatismos do Git neste repositório.
 pulls.push_rejected_summary=Mensagem completa de rejeição
-pulls.push_rejected_no_message=Integração falhada: O envio foi rejeitado mas não houve qualquer mensagem remota.
Reveja os automatismos do git para este repositório
+pulls.push_rejected_no_message=A integração falhou: O envio foi rejeitado mas não houve qualquer mensagem remota.
    Reveja os automatismos do git para este repositório pulls.open_unmerged_pull_exists=`Não pode executar uma operação de reabertura porque há um pedido de integração pendente (#%d) com propriedades idênticas.` pulls.status_checking=Algumas verificações estão pendentes pulls.status_checks_success=Todas as verificações foram bem sucedidas @@ -1360,15 +1470,15 @@ pulls.closed_at=`fechou este pedido de integração pulls.reopened_at=`reabriu este pedido de integração %[2]s` pulls.merge_instruction_hint=`Também pode ver as instruções para a linha de comandos.` -pulls.merge_instruction_step1_desc=No repositório do seu projecto, crie um novo ramo e teste as alterações. -pulls.merge_instruction_step2_desc=Integre as alterações e sincronize no Gitea. +pulls.merge_instruction_step1_desc=No repositório do seu projecto, crie um novo ramo e teste as modificações. +pulls.merge_instruction_step2_desc=Integre as modificações e sincronize no Gitea. milestones.new=Nova etapa milestones.open_tab=%d abertas milestones.close_tab=%d fechadas milestones.closed=Encerrada %s milestones.update_ago=Modificada há %s -milestones.no_due_date=Sem data limite +milestones.no_due_date=Sem data de vencimento milestones.open=Abrir milestones.close=Fechar milestones.new_subheader=As etapas organizam as questões e acompanham o progresso. @@ -1376,9 +1486,9 @@ milestones.completeness=%d%% concluído milestones.create=Criar etapa milestones.title=Título milestones.desc=Descrição -milestones.due_date=Data limite (opcional) +milestones.due_date=Data de vencimento (opcional) milestones.clear=Limpar -milestones.invalid_due_date_format=O formato da data limite tem que ser 'aaaa-mm-dd'. +milestones.invalid_due_date_format=O formato da data de vencimento tem que ser 'aaaa-mm-dd'. milestones.create_success=A etapa '%s' foi criada. milestones.edit=Editar etapa milestones.edit_subheader=As etapas organizam as questões e acompanham o progresso. @@ -1388,8 +1498,8 @@ milestones.edit_success=A etapa '%s' foi modificada. milestones.deletion=Eliminar etapa milestones.deletion_desc=Se eliminar uma etapa, irá removê-la de todas as questões relacionadas. Quer continuar? milestones.deletion_success=A etapa foi eliminada. 
-milestones.filter_sort.closest_due_date=Data limite mais próxima -milestones.filter_sort.furthest_due_date=Data limite mais distante +milestones.filter_sort.closest_due_date=Data de vencimento mais próxima +milestones.filter_sort.furthest_due_date=Data de vencimento mais distante milestones.filter_sort.least_complete=Menos completo milestones.filter_sort.most_complete=Mais completo milestones.filter_sort.most_issues=Mais questões @@ -1461,19 +1571,19 @@ activity.merged_prs_label=Integrados activity.opened_prs_label=Propostos activity.active_issues_count_1=%d questão vigente activity.active_issues_count_n=%d questões vigentes -activity.closed_issues_count_1=Questão encerrada -activity.closed_issues_count_n=Questões encerradas +activity.closed_issues_count_1=questão encerrada +activity.closed_issues_count_n=questões encerradas activity.title.issues_1=%d questão activity.title.issues_n=%d questões activity.title.issues_closed_from=%s resolvidas de %s activity.title.issues_created_by=%s criada por %s activity.closed_issue_label=Encerrada -activity.new_issues_count_1=Nova questão -activity.new_issues_count_n=Novas questões +activity.new_issues_count_1=questão nova +activity.new_issues_count_n=questões novas activity.new_issue_label=Em aberto activity.title.unresolved_conv_1=%d diálogo não resolvido activity.title.unresolved_conv_n=%d diálogos não resolvidos -activity.unresolved_conv_desc=Estas questões e estes pedidos de integração que foram alterados recentemente ainda não foram resolvidos. +activity.unresolved_conv_desc=Estas questões e estes pedidos de integração que foram modificados recentemente ainda não foram resolvidos. activity.unresolved_conv_label=Em aberto activity.title.releases_1=%d lançamento activity.title.releases_n=%d Lançamentos @@ -1492,8 +1602,8 @@ activity.git_stats_push_to_all_branches=para todos os ramos. activity.git_stats_on_default_branch=No ramo %s, activity.git_stats_file_1=%d ficheiro activity.git_stats_file_n=%d ficheiros -activity.git_stats_files_changed_1=foi alterado -activity.git_stats_files_changed_n=foram alterados +activity.git_stats_files_changed_1=foi modificado +activity.git_stats_files_changed_n=foram modificados activity.git_stats_additions=e houve activity.git_stats_addition_1=%d adição activity.git_stats_addition_n=%d adições @@ -1520,6 +1630,15 @@ settings.hooks=Automatismos web settings.githooks=Automatismos do Git settings.basic_settings=Configurações básicas settings.mirror_settings=Configurações do espelhamento +settings.mirror_settings.docs=Configure o seu repositório para puxar e/ou enviar automaticamente as modificações de/para outro repositório. Ramos, etiquetas e cometimentos serão sincronizados automaticamente. Como é que eu faço um espelho de outro repositório? +settings.mirror_settings.mirrored_repository=Repositório espelhado +settings.mirror_settings.direction=Sentido +settings.mirror_settings.direction.pull=Puxada +settings.mirror_settings.direction.push=Envio +settings.mirror_settings.last_update=Última modificação +settings.mirror_settings.push_mirror.none=Não foram configurados quaisquer espelhos de envio +settings.mirror_settings.push_mirror.remote_url=URL do repositório remoto Git +settings.mirror_settings.push_mirror.add=Adicionar espelho de envio settings.sync_mirror=Sincronizar agora settings.mirror_sync_in_progress=A sincronização do espelho está em andamento. Volte a verificar daqui a um minuto. 
settings.email_notifications.enable=Habilitar notificações por email @@ -1528,6 +1647,7 @@ settings.email_notifications.disable=Desabilitar notificações por email settings.email_notifications.submit=Definir preferência do email settings.site=Sítio web settings.update_settings=Modificar configurações +settings.branches.update_default_branch=Modificar o ramo principal settings.advanced_settings=Configurações avançadas settings.wiki_desc=Habilitar wiki do repositório settings.use_internal_wiki=Usar o wiki nativo @@ -1557,10 +1677,11 @@ settings.pulls.allow_rebase_merge_commit=Habilitar mudança de base com cometime settings.pulls.allow_squash_commits=Habilitar cometimentos de condensação para integrar settings.pulls.allow_manual_merge=Habilitar a marcação dos pedidos de integração como tendo sido executados manualmente settings.pulls.enable_autodetect_manual_merge=Habilitar a identificação automática de integrações manuais (obs.: nalguns casos especiais a avaliação pode ser errada) +settings.pulls.default_delete_branch_after_merge=Eliminar o ramo do pedido de integração depois de finalizada a integração, como predefinição settings.projects_desc=Habilitar projectos no repositório settings.admin_settings=Configurações do administrador settings.admin_enable_health_check=Habilitar verificações de integridade (git fsck) no repositório -settings.admin_enable_close_issues_via_commit_in_any_branch=Fechar uma questão através de um cometimento feito num ramo não padrão +settings.admin_enable_close_issues_via_commit_in_any_branch=Fechar uma questão através de um cometimento feito num ramo que não seja o principal settings.danger_zone=Zona de perigo settings.new_owner_has_same_repo=O novo dono já tem um repositório com o mesmo nome. Por favor, escolha outro nome. settings.convert=Converter para um repositório normal @@ -1584,11 +1705,12 @@ settings.transfer_form_title=Insira o nome do repositório para confirmar: settings.transfer_in_progress=Está a ser feita uma transferência. Cancele-a, por favor, se quiser transferir este repositório para outro utilizador. settings.transfer_notices_1=- Você perderá o acesso ao repositório se transferir para um utilizador individual. settings.transfer_notices_2=- Você manterá o acesso ao repositório se o transferir para uma organização da qual você é (co-)proprietário. +settings.transfer_notices_3=- Se o repositório for privado e for transferido para um utilizador individual, esta operação certifica que o utilizador tem pelo menos a permissão de leitura (e altera as permissões se for necessário). settings.transfer_owner=Novo proprietário settings.transfer_perform=Executar transferência settings.transfer_started=Este repositório foi marcado para ser transferido e aguarda a confirmação de "%s" settings.transfer_succeed=O repositório foi transferido. -settings.signing_settings=Configuração da verificação de assinaturas +settings.signing_settings=Configuração da validação de assinaturas settings.trust_model=Modelo de confiança na assinatura settings.trust_model.default=Modelo de confiança padrão settings.trust_model.default.desc=Usar o modelo de confiança padrão do repositório para esta instalação. @@ -1631,7 +1753,7 @@ settings.add_team=Adicionar equipa settings.add_team_duplicate=A equipa já tem o repositório settings.add_team_success=A equipa agora tem acesso ao repositório. 
settings.search_team=Procurar equipa… -settings.change_team_permission_tip=A permissão da equipa é definida na página de configurações da equipa e não pode ter alterações específicas de cada repositório +settings.change_team_permission_tip=A permissão da equipa é definida na página de configurações da equipa e não pode ter modificações específicas de cada repositório settings.delete_team_tip=Esta equipa tem acesso a todos os repositórios e não pode ser removida settings.remove_team_success=O acesso da equipa ao repositório foi removido. settings.add_webhook=Adicionar automatismo web @@ -1677,7 +1799,7 @@ settings.event_fork_desc=Feita a derivação do repositório. settings.event_release=Lançamento settings.event_release_desc=Lançamento publicado, modificado ou eliminado num repositório. settings.event_push=Enviar -settings.event_push_desc=Envio de Git para um repositório. +settings.event_push_desc=Envio do Git para um repositório. settings.event_repository=Repositório settings.event_repository_desc=Repositório criado ou eliminado. settings.event_header_issue=Eventos da questão @@ -1707,7 +1829,7 @@ settings.event_pull_request_review_desc=Pedido de integração aprovado, rejeita settings.event_pull_request_sync=Pedido de integração sincronizado settings.event_pull_request_sync_desc=Pedido de integração sincronizado. settings.branch_filter=Filtro por ramo -settings.branch_filter_desc=Lista branca para eventos de envio e de criação e eliminação de ramos, especificada como um padrão glob. Se estiver em branco ou for *, serão reportados eventos para todos os ramos. Veja a documentação github.com/gobwas/glob para detalhes da sintaxe. Exemplos: master, {master,release*}. +settings.branch_filter_desc=Lista de permissões do ramo para eventos de envio e de criação e eliminação de ramos, especificada como um padrão glob. Se estiver em branco ou for *, serão reportados eventos para todos os ramos. Veja a documentação github.com/gobwas/glob para ver os detalhes da sintaxe. Exemplos: trunk, {trunk,release*}. settings.active=Em funcionamento settings.active_helper=Informação sobre eventos despoletados será enviada para o URL deste automatismo web. settings.add_hook_success=O automatismo web foi adicionado. @@ -1728,9 +1850,9 @@ settings.add_msteams_hook_desc=Integrar Microsoft Teams no seu settings.add_feishu_hook_desc=Integrar Feishu no seu repositório. settings.deploy_keys=Chaves de instalação settings.add_deploy_key=Adicionar chave de instalação -settings.deploy_key_desc=Chaves de instalação têm acesso apenas de leitura ao repositório. +settings.deploy_key_desc=Chaves de instalação têm acesso para puxar do repositório apenas em modo de leitura. settings.is_writable=Habilitar acesso de escrita -settings.is_writable_info=Permitir que esta chave de instalação envie para o repositório. +settings.is_writable_info=Permitir a esta chave de instalação enviar para o repositório. settings.no_deploy_keys=Ainda não existem quaisquer chaves de instalação. settings.title=Título settings.deploy_key_content=Conteúdo @@ -1742,18 +1864,18 @@ settings.deploy_key_deletion_desc=Remover uma chave de instalação irá revogar settings.deploy_key_deletion_success=A chave de instalação foi removida. settings.branches=Ramos settings.protected_branch=Salvaguarda do ramo -settings.protected_branch_can_push=Permitir envio? +settings.protected_branch_can_push=Permitir envios? 
settings.protected_branch_can_push_yes=Pode enviar settings.protected_branch_can_push_no=Não pode enviar settings.branch_protection=Salvaguarda do ramo '%s' settings.protect_this_branch=Habilitar salvaguarda do ramo -settings.protect_this_branch_desc=Impede a eliminação e restringe o envio e integração do Git no ramo. -settings.protect_disable_push=Desabilitar envio +settings.protect_this_branch_desc=Impede a eliminação e restringe envios e integrações do Git no ramo. +settings.protect_disable_push=Desabilitar envios settings.protect_disable_push_desc=O envio para este ramo não será permitido. -settings.protect_enable_push=Habilitar envio +settings.protect_enable_push=Habilitar envios settings.protect_enable_push_desc=Qualquer utilizador com acesso de escrita terá permissão para enviar para este ramo (mas não poderá fazer envios forçados). -settings.protect_whitelist_committers=Lista de permissão restrita para envio -settings.protect_whitelist_committers_desc=Apenas os utilizadores ou equipas da lista terão permissão para enviar para este ramo (mas não poderão fazer envios forçados). +settings.protect_whitelist_committers=Lista de permissões para restringir os envios +settings.protect_whitelist_committers_desc=Apenas os utilizadores ou equipas constantes na lista terão permissão para enviar para este ramo (mas não poderão fazer envios forçados). settings.protect_whitelist_deploy_keys=Dar permissão às chaves de instalação para terem acesso de escrita para enviar. settings.protect_whitelist_users=Utilizadores com permissão para enviar: settings.protect_whitelist_search_users=Procurar utilizadores… @@ -1773,11 +1895,11 @@ settings.protect_approvals_whitelist_enabled_desc=Somente as revisões dos utili settings.protect_approvals_whitelist_users=Revisores com permissão: settings.protect_approvals_whitelist_teams=Equipas com permissão para rever: settings.dismiss_stale_approvals=Descartar aprovações obsoletas -settings.dismiss_stale_approvals_desc=Quando novos cometimentos que mudam o conteúdo do pedido de integração são enviados para o ramo, as aprovações antigas serão descartadas. +settings.dismiss_stale_approvals_desc=Quando novos cometimentos que mudam o conteúdo do pedido de integração forem enviados para o ramo, as aprovações antigas serão descartadas. settings.require_signed_commits=Exigir cometimentos assinados -settings.require_signed_commits_desc=Rejeitar envios para este ramo se não estiverem assinados ou não forem verificáveis. +settings.require_signed_commits_desc=Rejeitar envios para este ramo que não estejam assinados ou que não sejam validáveis. settings.protect_protected_file_patterns=Padrões de ficheiros protegidos (separados com ponto e vírgula '\;'): -settings.protect_protected_file_patterns_desc=Ficheiros protegidos que não podem ser alterados, mesmo que o utilizador tenha direitos para adicionar, editar ou eliminar ficheiros neste ramo. Múltiplos padrões podem ser separados com ponto e vírgula ('\;'). Veja a documentação em github.com/gobwas/glob para ver a sintaxe. Exemplos: .drone.yml, /docs/**/*.txt. +settings.protect_protected_file_patterns_desc=Ficheiros protegidos que não podem ser modificados, mesmo que o utilizador tenha direitos para adicionar, editar ou eliminar ficheiros neste ramo. Múltiplos padrões podem ser separados com ponto e vírgula ('\;'). Veja a documentação em github.com/gobwas/glob para ver a sintaxe. Exemplos: .drone.yml, /docs/**/*.txt. 
settings.add_protected_branch=Habilitar salvaguarda settings.delete_protected_branch=Desabilitar salvaguarda settings.update_protect_branch_success=A salvaguarda do ramo '%s' foi modificada. @@ -1785,17 +1907,27 @@ settings.remove_protected_branch_success=A salvaguarda do ramo '%s' foi desabili settings.protected_branch_deletion=Desabilitar salvaguarda do ramo settings.protected_branch_deletion_desc=Desabilitar a salvaguarda do ramo irá permitir que os utilizadores que tenham permissão de escrita enviem para o ramo. Quer continuar? settings.block_rejected_reviews=Bloquear a integração quando há revisões rejeitadas -settings.block_rejected_reviews_desc=A integração não será possível quando as alterações forem pedidas pelos revisores oficiais, mesmo que haja aprovações suficientes. +settings.block_rejected_reviews_desc=A integração não será possível quando as modificações forem pedidas pelos revisores oficiais, mesmo que haja aprovações suficientes. settings.block_on_official_review_requests=Bloquear integração nos pedidos de revisão oficiais settings.block_on_official_review_requests_desc=A integração não será possível quando tiver pedidos de revisão oficiais, mesmo que haja aprovações suficientes. settings.block_outdated_branch=Bloquear integração se o pedido de integração for obsoleto settings.block_outdated_branch_desc=A integração não será possível quando o ramo de topo estiver abaixo do ramo base. -settings.default_branch_desc=Escolha um ramo padrão do repositório para pedidos de integração e cometimentos: +settings.default_branch_desc=Escolha um ramo do repositório como sendo o predefinido para pedidos de integração e cometimentos: settings.default_merge_style_desc=Tipo de integração predefinido para pedidos de integração: settings.choose_branch=Escolha um ramo… settings.no_protected_branch=Não existem ramos protegidos. settings.edit_protected_branch=Editar settings.protected_branch_required_approvals_min=O número mínimo exigido de aprovações não pode ser negativo. +settings.tags=Etiquetas +settings.tags.protection=Proteger etiquetas +settings.tags.protection.pattern=Padrão das etiquetas +settings.tags.protection.allowed=Com permissão +settings.tags.protection.allowed.users=Utilizadores com permissão +settings.tags.protection.allowed.teams=Equipas com permissão +settings.tags.protection.allowed.noone=Ninguém +settings.tags.protection.create=Proteger etiqueta +settings.tags.protection.none=Não há etiquetas protegidas. +settings.tags.protection.pattern.description=Pode usar um só nome ou um padrão glob ou uma expressão regular para corresponder a várias etiquetas. Para mais informações leia o guia das etiquetas protegidas. settings.bot_token=Código do bot settings.chat_id=ID do diálogo settings.matrix.homeserver_url=URL do servidor caseiro @@ -1804,11 +1936,12 @@ settings.matrix.access_token=Código de acesso settings.matrix.message_type=Tipo de mensagem settings.archive.button=Arquivar repositório settings.archive.header=Arquivar este repositório -settings.archive.text=Arquivar um repositório fará com que seja inteiramente de leitura. Não estará visível no painel de controlo, não poderá receber cometimentos e não será possível criar questões ou pedidos de integração. +settings.archive.text=Arquivar um repositório fará com que fique inteira e exclusivamente de leitura. Não ficará visível no painel de controlo, não poderá receber cometimentos e não será possível criar questões ou pedidos de integração. settings.archive.success=O repositório foi arquivado com sucesso. 
settings.archive.error=Ocorreu um erro enquanto decorria o processo de arquivo do repositório. Veja os registo para obter mais detalhes. settings.archive.error_ismirror=Não pode arquivar um repositório que tenha sido espelhado. settings.archive.branchsettings_unavailable=As configurações dos ramos não estão disponíveis quando o repositório está arquivado. +settings.archive.tagsettings_unavailable=As configurações sobre etiquetas não estão disponíveis quando o repositório está arquivado. settings.unarchive.button=Desarquivar repositório settings.unarchive.header=Desarquivar este repositório settings.unarchive.text=Desarquivar o repositório irá restaurar a capacidade de receber cometimentos e envios, assim como novas questões e pedidos de integração. @@ -1820,7 +1953,7 @@ settings.lfs_filelist=Ficheiros LFS armazenados neste repositório settings.lfs_no_lfs_files=Não existem quaisquer ficheiros LFS armazenados neste repositório settings.lfs_findcommits=Procurar cometimentos settings.lfs_lfs_file_no_commits=Não foram encontrados quaisquer cometimentos para este ficheiro LFS -settings.lfs_noattribute=Este caminho não tem o atributo bloqueável no ramo padrão +settings.lfs_noattribute=Este caminho não tem o atributo bloqueável no ramo principal settings.lfs_delete=Eliminar ficheiro LFS com o OID %s settings.lfs_delete_warning=Eliminar um ficheiro LFS pode causar erros do tipo 'elemento não existe' no checkout. Tem a certeza? settings.lfs_findpointerfiles=Procurar ficheiros apontadores @@ -1831,7 +1964,7 @@ settings.lfs_lock_already_exists=Já existe um bloqueio: %s settings.lfs_lock=Bloquear settings.lfs_lock_path=Caminho de ficheiro a bloquear... settings.lfs_locks_no_locks=Sem bloqueios -settings.lfs_lock_file_no_exist=O ficheiro bloqueado não existe no ramo padrão +settings.lfs_lock_file_no_exist=O ficheiro bloqueado não existe no ramo principal settings.lfs_force_unlock=Forçar desbloqueio settings.lfs_pointers.found=Encontrado(s) %d ponteiro(s) de blob - %d associado(a), %d desassociado(a) (%d ausente do armazenamento) settings.lfs_pointers.sha=SHA do blob @@ -1853,13 +1986,14 @@ diff.download_diff=Descarregar ficheiro diff diff.show_split_view=Visualização em 2 colunas diff.show_unified_view=Visualização unificada diff.whitespace_button=Espaço em branco -diff.whitespace_show_everything=Mostrar todas as alterações +diff.whitespace_show_everything=Mostrar todas as modificações diff.whitespace_ignore_all_whitespace=Ignorar espaço em branco ao comparar linhas -diff.whitespace_ignore_amount_changes=Ignorar alterações na quantidade de espaço em branco -diff.whitespace_ignore_at_eol=Ignorar alterações do espaço em branco no fim das linhas -diff.stats_desc= %d ficheiros alterados com %d adições e %d eliminações -diff.stats_desc_file=%d alterações: %d adições e %d exclusões +diff.whitespace_ignore_amount_changes=Ignorar modificações na quantidade de espaço em branco +diff.whitespace_ignore_at_eol=Ignorar modificações do espaço em branco no fim das linhas +diff.stats_desc= %d ficheiros modificados com %d adições e %d eliminações +diff.stats_desc_file=%d modificações: %d adições e %d exclusões diff.bin=BIN +diff.bin_not_shown=Ficheiro binário não mostrado. 
diff.view_file=Ver ficheiro diff.file_before=Antes diff.file_after=Depois @@ -1867,7 +2001,8 @@ diff.file_image_width=Largura diff.file_image_height=Altura diff.file_byte_size=Tamanho diff.file_suppressed=A apresentação das diferenças no ficheiro foi suprimida por ser demasiado grande -diff.too_many_files=Alguns ficheiros não foram mostrados porque foram alterados demasiados ficheiros neste diff +diff.file_suppressed_line_too_long=A apresentação das diferenças entre ficheiros foi suprimida porque há linhas demasiado longas +diff.too_many_files=Alguns ficheiros não foram mostrados porque foram modificados demasiados ficheiros neste diff diff.comment.placeholder=Deixar um comentário diff.comment.markdown_info=A formatação com markdown é suportada. diff.comment.add_single_comment=Adicionar um único comentário @@ -1879,7 +2014,7 @@ diff.review.header=Submeter revisão diff.review.placeholder=Comentário da revisão diff.review.comment=Comentar diff.review.approve=Aprovar -diff.review.reject=Solicitar alterações +diff.review.reject=Solicitar modificações diff.committed_by=cometido por diff.protected=Protegido diff.image.side_by_side=Lado a Lado @@ -1894,6 +2029,7 @@ release.new_release=Novo lançamento release.draft=Rascunho release.prerelease=Pré-lançamento release.stable=Estável +release.compare=Comparar release.edit=editar release.ahead.commits=%d cometimentos release.ahead.target=para %s desde este lançamento @@ -1920,6 +2056,7 @@ release.deletion_tag_desc=Esta etiqueta vai ser eliminada do repositório. O con release.deletion_tag_success=A etiqueta foi eliminada. release.tag_name_already_exist=Já existe um lançamento com esta etiqueta. release.tag_name_invalid=A etiqueta não é válida. +release.tag_name_protected=O nome da etiqueta está protegido. release.tag_already_exist=Este nome de etiqueta já existe. release.downloads=Descargas release.download_count=Descargas: %s @@ -1946,11 +2083,15 @@ branch.deleted_by=Eliminado por %s branch.restore_success=O ramo '%s' foi restaurado. branch.restore_failed=Falhou a restauração do ramo '%s'. branch.protected_deletion_failed=O ramo '%s' está protegido, não pode ser eliminado. -branch.default_deletion_failed=O ramo '%s' é o ramo padrão, não pode ser eliminado. +branch.default_deletion_failed=O ramo '%s' é o ramo principal, não pode ser eliminado. branch.restore=Restaurar ramo '%s' branch.download=Descarregar o ramo '%s' -branch.included_desc=Este ramo faz parte do ramo padrão +branch.included_desc=Este ramo faz parte do ramo principal branch.included=Incluído +branch.create_new_branch=Criar ramo a partir do ramo: +branch.confirm_create_branch=Criar ramo +branch.new_branch=Criar um novo ramo +branch.new_branch_from=Criar um novo ramo a partir do ramo '%s' tag.create_tag=Criar etiqueta %s tag.create_success=A etiqueta '%s' foi criada. @@ -1960,6 +2101,9 @@ topic.done=Concluído topic.count_prompt=Não pode escolher mais do que 25 tópicos topic.format_prompt=Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') e podem ter até 35 caracteres. +error.csv.too_large=Não é possível apresentar este ficheiro por ser demasiado grande. +error.csv.unexpected=Não é possível apresentar este ficheiro porque contém um caractere inesperado na linha %d e coluna %d. +error.csv.invalid_field_count=Não é possível apresentar este ficheiro porque tem um número errado de campos na linha %d. 
[org] org_name_holder=Nome da organização @@ -2038,7 +2182,7 @@ teams.read_access_helper=Os membros podem ver e clonar os repositórios da equip teams.write_access=Acesso de escrita teams.write_access_helper=Os membros podem ler e enviar para os repositórios da equipa. teams.admin_access=Acesso de administrador -teams.admin_access_helper=Os membros podem enviar para e receber dos repositórios da equipa e adicionar colaboradores a esses repositórios. +teams.admin_access_helper=Os membros podem puxar de, e enviar para os repositórios da equipa e adicionar colaboradores a esses repositórios. teams.no_desc=Esta equipa não tem descrição teams.settings=Configurações teams.owners_permission_desc=Os proprietários têm acesso total a todos os repositórios e têm acesso de administrador à organização. @@ -2050,8 +2194,8 @@ teams.delete_team_title=Eliminar equipa teams.delete_team_desc=Eliminar uma equipa revoga o acesso dos seus membros ao repositório. Quer continuar? teams.delete_team_success=A equipa foi eliminada. teams.read_permission_desc=Esta equipa atribui acesso de leitura: os seus membros podem ver e clonar os repositórios da equipa. -teams.write_permission_desc=Esta equipa atribui acesso de escrita: os seus membros podem ler de e enviar para os repositórios da equipa. -teams.admin_permission_desc=Esta equipa atribui o acesso de administração: os seus membros podem ler de, enviar para e adicionar colaboradores aos repositórios da equipa. +teams.write_permission_desc=Esta equipa atribui acesso de escrita: os seus membros podem ler de, e enviar para os repositórios da equipa. +teams.admin_permission_desc=Esta equipa atribui o acesso de administração: os seus membros podem ler de, enviar para, e adicionar colaboradores aos repositórios da equipa. teams.create_repo_permission_desc=Adicionalmente, esta equipa atribui a permissão de criar repositórios: os seus membros podem criar novos repositórios na organização. teams.repositories=Repositórios da equipa teams.search_repo_placeholder=Procurar repositório… @@ -2068,8 +2212,8 @@ teams.specific_repositories_helper=Os membros só terão acesso a repositórios teams.all_repositories=Todos os repositórios teams.all_repositories_helper=A equipa tem acesso a todos os repositórios. Escolher isto irá adicionar todos os repositórios existentes à equipa. teams.all_repositories_read_permission_desc=Esta equipa atribui o acesso de leitura a todos os repositórios: os seus membros podem ver e clonar os repositórios. -teams.all_repositories_write_permission_desc=Esta equipa atribui o acesso de escrita a todos os repositórios: os seus membros podem ler de e enviar para os repositórios. -teams.all_repositories_admin_permission_desc=Esta equipa atribui o acesso de administração a todos os repositórios: os seus membros podem ler de, enviar para e adicionar colaboradores aos repositórios. +teams.all_repositories_write_permission_desc=Esta equipa atribui o acesso de escrita a todos os repositórios: os seus membros podem ler de, e enviar para os repositórios. +teams.all_repositories_admin_permission_desc=Esta equipa atribui o acesso de administração a todos os repositórios: os seus membros podem ler de, enviar para, e adicionar colaboradores aos repositórios. 
[admin] dashboard=Painel de controlo @@ -2157,6 +2301,8 @@ dashboard.total_gc_time=Pausa total da recolha de lixo dashboard.total_gc_pause=Pausa total da recolha de lixo dashboard.last_gc_pause=Última pausa da recolha de lixo dashboard.gc_times=Tempos da recolha de lixo +dashboard.delete_old_actions=Eliminar todas as operações antigas da base de dados +dashboard.delete_old_actions.started=Foi iniciado o processo de eliminação de todas as operações antigas da base de dados. users.user_manage_panel=Gestão das contas de utilizadores users.new_account=Criar conta de utilizador @@ -2252,7 +2398,6 @@ auths.host=Servidor auths.port=Porto auths.bind_dn=Vincular DN auths.bind_password=Vincular senha -auths.bind_password_helper=Atenção: Esta senha é armazenada em texto simples. Use uma conta só de leitura, se possível. auths.user_base=Base de pesquisa de utilizador auths.user_dn=DN do utilizador auths.attribute_username=Atributo do nome de utilizador @@ -2269,7 +2414,7 @@ auths.filter=Filtro de utilizador auths.admin_filter=Filtro de administrador auths.restricted_filter=Filtro restrito auths.restricted_filter_helper=Deixe em branco para não definir quaisquer utilizadores como restritos. Use um asterisco ('*') para definir todos os utilizadores que não correspondam ao filtro de administrador como restritos. -auths.verify_group_membership=Verificar afiliação ao grupo no LDAP +auths.verify_group_membership=Validar afiliação ao grupo no LDAP auths.group_search_base=Base DN para a pesquisa de grupos auths.valid_groups_filter=Filtro de grupos válidos auths.group_attribute_list_users=Atributo de grupo que contém a lista de utilizadores @@ -2281,8 +2426,9 @@ auths.smtpport=Porto do SMTP auths.allowed_domains=Domínios permitidos auths.allowed_domains_helper=Deixe em branco para permitir todos os domínios. Separe múltiplos domínios com uma vírgula (','). 
auths.enable_tls=Habilitar encriptação TLS -auths.skip_tls_verify=Ignorar verificação TLS +auths.skip_tls_verify=Ignorar validação TLS auths.pam_service_name=Nome do Serviço PAM +auths.pam_email_domain=Domínio de email do PAM (opcional) auths.oauth2_provider=Fornecedor OAuth2 auths.oauth2_icon_url=URL do ícone auths.oauth2_clientID=ID do cliente (chave) @@ -2382,6 +2528,7 @@ config.db_path=Caminho config.service_config=Configuração do serviço config.register_email_confirm=Exigir confirmação de email para se inscrever config.disable_register=Desabilitar a auto-inscrição +config.allow_only_internal_registration=Permitir registo somente através do próprio Gitea config.allow_only_external_registration=Permitir a inscrição somente por meio de serviços externos config.enable_openid_signup=Habilitar a auto-inscrição com OpenID config.enable_openid_signin=Habilitar início de sessão com OpenID @@ -2398,13 +2545,13 @@ config.enable_timetracking=Habilitar a contagem de tempo config.default_enable_timetracking=Habilitar, por norma, a contagem do tempo config.default_allow_only_contributors_to_track_time=Permitir a contagem de tempo somente aos contribuidores config.no_reply_address=Domínio dos emails ocultos -config.default_visibility_organization=Visibilidade padrão para as novas organizações +config.default_visibility_organization=Visibilidade predefinida para as novas organizações config.default_enable_dependencies=Habilitar, por norma, dependências nas questões config.webhook_config=Configuração do automatismo web config.queue_length=Tamanho da fila config.deliver_timeout=Prazo da entrega -config.skip_tls_verify=Ignorar verificação TLS +config.skip_tls_verify=Ignorar validação TLS config.mailer_config=Configuração da aplicação SMTP config.mailer_enabled=Habilitado @@ -2418,7 +2565,7 @@ config.mailer_sendmail_args=Argumentos extras para o sendmail config.mailer_sendmail_timeout=Tempo limite do Sendmail config.test_email_placeholder=Email (ex.: teste@exemplo.com) config.send_test_mail=Enviar email de teste -config.test_mail_failed=Ocorreu uma falha ao enviar um email de teste para '%s': %v +config.test_mail_failed=Falhou o envio de um email de teste para '%s': %v config.test_mail_sent=Foi enviado um email de teste para '%s'. config.oauth_config=Configuração OAuth @@ -2453,7 +2600,7 @@ config.git_gc_args=Argumentos da recolha de lixo config.git_migrate_timeout=Prazo da migração config.git_mirror_timeout=Tempo limite do espelhamento config.git_clone_timeout=Prazo da operação de clonagem -config.git_pull_timeout=Prazo da operação de receber +config.git_pull_timeout=Prazo da operação de puxar config.git_gc_timeout=Prazo da operação de recolha de lixo config.log_config=Configuração do registo @@ -2573,7 +2720,7 @@ mirror_sync_push=sincronizou cometimentos para %[3]s%[2]s para %[3]s do espelho mirror_sync_delete=sincronizou e eliminou a referência %[2]s em %[3]s do ficheiro approve_pull_request=`aprovou %s#%[2]s` -reject_pull_request=`sugeriu alterações para %s#%[2]s` +reject_pull_request=`sugeriu modificações para %s#%[2]s` publish_release=`lançou "%[4]s" à %[3]s` review_dismissed=`descartou a revisão de %[4]s para %[3]s#%[2]s` review_dismissed_reason=Motivo: @@ -2602,8 +2749,8 @@ raw_seconds=segundos raw_minutes=minutos [dropzone] -default_message=Largue os ficheiros aqui ou clique aqui para os enviar. -invalid_input_type=Não pode enviar ficheiros deste tipo. +default_message=Largue os ficheiros aqui ou clique aqui para os carregar. +invalid_input_type=Não pode carregar ficheiros deste tipo. 
file_too_big=O tamanho do ficheiro ({{filesize}} MB) excede o tamanho máximo de ({{maxFilesize}} MB). remove_file=Remover ficheiro @@ -2626,8 +2773,8 @@ error.no_committer_account=Não existe qualquer conta ligada ao endereço de ema error.no_gpg_keys_found=Não foi encontrada uma chave conhecida para esta assinatura, na base de dados error.not_signed_commit=Não é um cometimento assinado error.failed_retrieval_gpg_keys=Falhou ao obter uma chave ligada à conta de quem cometeu -error.probable_bad_signature=AVISO! Embora exista uma chave com este ID na base de dados, ela não verifica este cometimento! Este cometimento é SUSPEITO. -error.probable_bad_default_signature=AVISO! Embora a chave padrão tenha este ID, ela não verifica este cometimento! Este cometimento é SUSPEITO. +error.probable_bad_signature=AVISO! Embora exista uma chave com este ID na base de dados, ela não valida este cometimento! Este cometimento é SUSPEITO. +error.probable_bad_default_signature=AVISO! Embora a chave padrão tenha este ID, ela não valida este cometimento! Este cometimento é SUSPEITO. [units] error.no_unit_allowed_repo=Não tem permissão para aceder a nenhuma parte deste repositório. diff --git a/options/locale/locale_ru-RU.ini b/options/locale/locale_ru-RU.ini index 8a3b34d0a961..103abcf31220 100644 --- a/options/locale/locale_ru-RU.ini +++ b/options/locale/locale_ru-RU.ini @@ -83,6 +83,7 @@ add=Добавить add_all=Добавить все remove=Удалить remove_all=Удалить все +edit=Изменить write=Редактирование preview=Предпросмотр @@ -91,11 +92,16 @@ loading=Загрузка… step1=Шаг 1: step2=Шаг 2: +error=Ошибка error404=Страница, которую вы пытаетесь открыть, либо не существует, либо вы не авторизованы для ее просмотра. +never=Никогда + [error] occurred=Произошла ошибка report_message=Если вы уверены, что это ошибка Gitea, пожалуйста, проверьте наличие существующей проблемы на GitHub и откройте новую при необходимости. +missing_csrf=Некорректный запрос: CSRF токен отсутствует +invalid_csrf=Неверный запрос: неверный CSRF токен [startpage] app_desc=Удобный сервис собственного хостинга репозиториев Git @@ -299,7 +305,8 @@ openid_connect_desc=Выбранный OpenID URI неизвестен. Свяж openid_register_title=Создать новый аккаунт openid_register_desc=Выбранный OpenID URI неизвестен. Свяжите с новой учетной записью здесь. openid_signin_desc=Введите свой OpenID URI. Например: https://anne.me, bob.openid.org.cn или gnusocial.net/carry. -disable_forgot_password_mail=Восстановление аккаунта отключено. Пожалуйста, свяжитесь с администратором сайта. +disable_forgot_password_mail=Восстановление учётной записи отключено, потому что электронная почта не настроена. Пожалуйста, свяжитесь с администратором сайта. +disable_forgot_password_mail_admin=Восстановление учетной записи доступно только при настройке электронной почты. Пожалуйста, настройте электронную почту, чтобы включить восстановление аккаунта. email_domain_blacklisted=С данным адресом электронной почты регистрация невозможна. authorize_application=Авторизация приложения authorize_redirect_notice=Вы будете перенаправлены на %s, если вы авторизуете это приложение. @@ -313,14 +320,63 @@ password_pwned=Выбранный вами пароль находится в %s, + activate_account=Пожалуйста активируйте свой аккаунт +activate_account.title=%s, пожалуйста, активируйте вашу учетную запись +activate_account.text_1=Привет, %[1]s, спасибо за регистрацию в %[2]s! 
+activate_account.text_2=Пожалуйста, перейдите по ссылке, чтобы активировать свою учетную запись в течение %s: + activate_email=Подтвердите адрес своей электронной почты -reset_password=Восстановить учётную запись -register_success=Регистрация прошла успешно +activate_email.title=%s, пожалуйста, подтвердите ваш адрес электронной почты +activate_email.text=Пожалуйста, перейдите по ссылке, чтобы подтвердить ваш адрес электронной почты в течение %s: + register_notify=Добро пожаловать на Gitea +register_notify.title=%[1], добро пожаловать в %[2] +register_notify.text_1=это письмо с вашим подтверждением регистрации в %s! +register_notify.text_2=Теперь вы можете войти через логин: %s. +register_notify.text_3=Если эта учетная запись была создана для вас, пожалуйста, сначала установите пароль. +reset_password=Восстановить учётную запись +reset_password.title=%s, вы запросили восстановление вашей учетной записи +reset_password.text=Пожалуйста, перейдите по ссылке, чтобы восстановить учетную запись в течение %s: +register_success=Регистрация прошла успешно +issue_assigned.pull=@%[1] назначил вам запрос на слияние %[2] в репозитории %[3]. +issue_assigned.issue=@%[1]s назначил вам задачу %[2]s в репозитории %[3]s. + +issue.x_mentioned_you=@%s упомянул вас: +issue.action.force_push=%[1]s форсировал отправку изменений %[2]s с %[3]s до %[4]s. +issue.action.push_n=@%[1]s отправил %[3]d изменений %[2]s +issue.action.close=@%[1]s закрыты #%[2]d. +issue.action.reopen=@%[1]s переоткрыты #%[2]d. +issue.action.merge=@%[1]s слиты #%[2]d в %[3]s. +issue.action.approve=@%[1]s одобрил этот запрос на слияние. +issue.action.reject=@%[1]s запросил изменения в этом запросе на слияние. +issue.action.review=@%[1]s прокомментировал этот запрос на слияние. +issue.action.review_dismissed=@%[1]s отклонил последний отзыв с %[2]s для этого запроса на слияние. +issue.action.ready_for_review=@%[1]s отметил этот запрос на слияние как готовый к рассмотрению. +issue.action.new=@%[1]s создал #%[2]d. +issue.in_tree_path=В %s: + +release.new.subject=%s в %s выпущено +release.new.text=@%[1]s выпустил релиз %[2]s в %[3]s +release.title=Название: %s +release.note=Примечание: +release.downloads=Загрузки: +release.download.zip=Исходный код (ZIP) +release.download.targz=Исходный код (TAR.GZ) + +repo.transfer.subject_to=%s хочет передать "%s" в %s +repo.transfer.subject_to_you=%s хочет передать "%s" вам +repo.transfer.to_you=вам +repo.transfer.body=Для того чтобы принять или отклонить перейдите по ссылке %s или просто проигнорируйте данный запрос. + +repo.collaborator.added.subject=%s добавил вас в %s +repo.collaborator.added.text=Вы были добавлены в качестве соавтора репозитория: [modal] yes=Да @@ -361,6 +417,7 @@ email_error=`не является адресом электронной поч url_error=` не является допустимым URL-адресом.` include_error=` должен содержать '%s'.` glob_pattern_error=` неверный glob шаблон: %s.` +regex_pattern_error=` Неверный шаблон регулярного выражения: %s.` unknown_error=Неизвестная ошибка: captcha_incorrect=Капча не пройдена. password_not_match=Пароли не совпадают. @@ -415,6 +472,7 @@ repositories=Репозитории activity=Активность followers=Подписчики starred=Избранные репозитории +watched=Отслеживаемые репозитории projects=Проекты following=Подписки follow=Подписаться @@ -536,7 +594,19 @@ ssh_key_been_used=Этот SSH ключ уже был добавлен на се ssh_key_name_used=SSH ключ с этим именем уже есть в вашем аккаунте. ssh_principal_been_used=Участник уже был добавлен на сервер. 
gpg_key_id_used=Публичный GPG ключ с таким же идентификатором уже существует. -gpg_no_key_email_found=Этот ключ GPG не может использоваться с любым адресом электронной почты, привязанной к вашей учетной записи. +gpg_no_key_email_found=Этот GPG ключ не соответствует ни одному активному адресу электронной почты, связанному с вашей учетной записью. Он по-прежнему может быть добавлен, если вы подписали указанный токен. +gpg_key_matched_identities=Соответствующие идентификаторы: +gpg_key_verified=Проверенный ключ +gpg_key_verified_long=Ключ был проверен токеном и может быть использован для проверки коммитов, соответствующих любым активным адресом электронной почты этого пользователя в дополнение к любым соответствующим идентификаторам этого ключа. +gpg_key_verify=Проверить +gpg_invalid_token_signature=Предоставленный GPG ключ, подпись и токен не совпадают или токен устарел. +gpg_token_required=Вы должны предоставить подпись для токена ниже +gpg_token=Токен +gpg_token_help=Вы можете сгенерировать подпись с помощью: +gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=Бронированная GPG подпись +key_signature_gpg_placeholder=Начинается с '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=GPG ключ '%s' проверен. subkeys=Подключи key_id=ИД ключа key_name=Имя ключа @@ -667,6 +737,14 @@ email_notifications.onmention=Посылать письмо на эл. почт email_notifications.disable=Отключить почтовые уведомления email_notifications.submit=Установить настройки электронной почты +visibility=Видимость пользователя +visibility.public=Публичный +visibility.public_tooltip=Видимый для всех пользователей +visibility.limited=Ограниченный +visibility.limited_tooltip=Видимый только авторизованным пользователям +visibility.private=Приватный +visibility.private_tooltip=Видимый только членам организации + [repo] new_repo_helper=Репозиторий содержит все файлы проекта, включая историю ревизии. Уже есть где-то еще? Мигрировать репозиторий. owner=Владелец @@ -717,10 +795,17 @@ mirror_prune_desc=Удаление устаревших отслеживаемы mirror_interval=Интервал зеркалирования (допустимые единицы измерения 'h', 'm', 's'). Значение 0 отключает синхронизацию. mirror_interval_invalid=Недопустимый интервал зеркалирования. mirror_address=Клонировать по URL -mirror_address_desc=Поместите все необходимые учётные данные в раздел Авторизация клона. +mirror_address_desc=Поместите необходимые учетные данные в секцию авторизации. mirror_address_url_invalid=Указанный url неверный. Вы должны правильно экранировать все компоненты url. mirror_address_protocol_invalid=Указанный url неверный. Только http(s):// или git:// местоположения могут быть зеркалированы. +mirror_lfs=Хранилище больших файлов (LFS) +mirror_lfs_desc=Активировать зеркалирование данных LFS. +mirror_lfs_endpoint=LFS Endpoint +mirror_lfs_endpoint_desc=Sync попытается использовать URL-адрес клона для определения сервера LFS. Вы также можете указать пользовательскую конечную точку, если данные хранится где-то в хранилище. mirror_last_synced=Последняя синхронизация +mirror_password_placeholder=(Неизменный) +mirror_password_blank_placeholder=(Отменено) +mirror_password_help=Смените имя пользователя для удаления пароля. 
watchers=Наблюдатели stargazers=Звездочеты forks=Форки @@ -737,6 +822,7 @@ delete_preexisting_label=Удалить delete_preexisting=Удалить уже существующие файлы delete_preexisting_content=Удалить файлы из %s delete_preexisting_success=Удалены непринятые файлы в %s +blame_prior=Посмотреть авторство до этих изменений transfer.accept=Принять трансфер transfer.accept_desc=Переместить в "%s" @@ -773,11 +859,16 @@ form.reach_limit_of_creation_n=Вы уже достигли ваш предел form.name_reserved=Название репозитория '%s' зарезервировано. form.name_pattern_not_allowed=Шаблон имени репозитория '%s' не допускается. -need_auth=Требуется авторизация +need_auth=Авторизация migrate_options=Параметры миграции migrate_service=Сервис миграции migrate_options_mirror_helper=Этот репозиторий будет зеркалом migrate_options_mirror_disabled=Администратор вашего сайта отключил новые зеркала. +migrate_options_lfs=Перенос LFS файлов +migrate_options_lfs_endpoint.label=LFS Endpoint +migrate_options_lfs_endpoint.description=Миграция попытается использовать ваш Git удаленно, чтобы определить сервер LFS. Вы также можете указать пользовательскую конечную точку, если данные хранится где-то в хранилище. +migrate_options_lfs_endpoint.description.local=Поддерживается также путь на локальном сервере. +migrate_options_lfs_endpoint.placeholder=Оставьте пустым для получения из клонируемого URL migrate_items=Элементы миграции migrate_items_wiki=Вики migrate_items_milestones=Этапы @@ -794,6 +885,7 @@ migrate.permission_denied=У вас нет прав на импорт локал migrate.permission_denied_blocked=Вам не разрешено импортировать с заблокированных узлов. migrate.permission_denied_private_ip=Вы не можете импортировать с приватных IP. migrate.invalid_local_path=Недопустимый локальный путь. Возможно он не существует или не является папкой. +migrate.invalid_lfs_endpoint=Конечная точка LFS недействительна. migrate.failed=Миграция не удалась: %v migrate.migrate_items_options=Токен доступа необходим для миграции дополнительных элементов migrated_from=Перенесено с %[2]s @@ -801,11 +893,19 @@ migrated_from_fake=Перенесено с %[1]s migrate.migrate=Миграция из %s migrate.migrating=Перенос из %s... migrate.migrating_failed=Перенос из %s не удался. +migrate.migrating_failed.error=Ошибка: %s migrate.github.description=Миграция данных с Github.com или Github Enterprise. migrate.git.description=Миграция или зеркалирование данных git из служб Git migrate.gitlab.description=Миграция данных с GitLab.com или сервера Self-Hosted gitlab. migrate.gitea.description=Миграция данных с Gitea.com или отдельного сервера Gitea. migrate.gogs.description=Перенос данных с notabug.org или другого сервера Gogs. +migrate.migrating_git=Перенос Git данных +migrate.migrating_topics=Миграция тем +migrate.migrating_milestones=Миграция этапов +migrate.migrating_labels=Миграция меток +migrate.migrating_releases=Миграция релизов +migrate.migrating_issues=Миграция Замечаний +migrate.migrating_pulls=Миграция Pull Request mirror_from=зеркало из forked_from=форкнуто от @@ -838,6 +938,7 @@ branch=ветка tree=Дерево clear_ref=`Удалить текущую ссылку` filter_branch_and_tag=Фильтр по ветке или тегу +find_tag=Найти тег branches=Ветки tags=Теги issues=Задачи @@ -1107,6 +1208,8 @@ issues.context.edit=Редактировать issues.context.delete=Удалить issues.no_content=Пока нет содержимого. 
issues.close_issue=Закрыть +issues.pull_merged_at=`Объединил коммит %[2]s в %[3]s %[4]s` +issues.manually_pull_merged_at=`%[4]s вручную объединил коммит %[2]s в %[3]s` issues.close_comment_issue=Прокомментировать и закрыть issues.reopen_issue=Открыть снова issues.reopen_comment_issue=Прокомментировать и открыть снова @@ -1200,6 +1303,7 @@ issues.error_modifying_due_date=Не удалось изменить срок в issues.error_removing_due_date=Не удалось убрать срок выполнения. issues.push_commit_1=добавил(а) %d коммит %s issues.push_commits_n=добавил(а) %d коммитов %s +issues.force_push_codes=`принудительно залито %[1]s от %[2] к %[4]s %[6]s` issues.due_date_form=гггг-мм-дд issues.due_date_form_add=Добавить срок выполнения issues.due_date_form_edit=Редактировать @@ -1265,6 +1369,9 @@ issues.review.resolved_by=пометить этот разговор как ра issues.assignee.error=Не все назначения были добавлены из-за непредвиденной ошибки. issues.reference_issue.body=Тело +compare.compare_base=Основа +compare.compare_head=сравнить + pulls.desc=Включить запросы на слияние и проверки кода. pulls.new=Новый Pull Request pulls.compare_changes=Новый Pull Request @@ -1292,7 +1399,10 @@ pulls.manually_merged_as=Pull request был объединён вручную, pulls.is_closed=Слияние этого запроса успешно завершено. pulls.has_merged=Слияние этого запроса успешно завершено. pulls.title_wip_desc=`Добавьте %s в начало заголовка для защиты от случайного досрочного принятия Pull Request'а.` -pulls.cannot_merge_work_in_progress=Данный Pull Request помечен как находящийся ещё в разработке. Удалите %s из названия после завершения работы над ним +pulls.cannot_merge_work_in_progress=Этот запрос на слияние помечен как в процессе работы. +pulls.still_in_progress=Всё ещё в процессе? +pulls.add_prefix=Добавить %s префикс +pulls.remove_prefix=Удалить %s префикс pulls.data_broken=Содержимое этого запроса было нарушено вследствие удаления информации форка. pulls.files_conflicted=Этот Pull Request имеет изменения, конфликтующие с целевой веткой. pulls.is_checking=Продолжается проверка конфликтов, пожалуйста обновите страницу несколько позже. @@ -1518,6 +1628,15 @@ settings.hooks=Веб-хуки settings.githooks=Git Hook'и settings.basic_settings=Основные параметры settings.mirror_settings=Настройки зеркалирования +settings.mirror_settings.docs=Настройте свой проект, чтобы автоматически отправлять и/или получать изменения из другого репозитория. Ветки, теги и коммиты будут синхронизированы автоматически. Как мне зеркалировать репозитории? +settings.mirror_settings.mirrored_repository=Синхронизированное хранилище +settings.mirror_settings.direction=Направление +settings.mirror_settings.direction.pull=Отправка +settings.mirror_settings.direction.push=Получение +settings.mirror_settings.last_update=Последнее обновление +settings.mirror_settings.push_mirror.none=Push-зеркало не добавлено +settings.mirror_settings.push_mirror.remote_url=URL удалённого хранилища +settings.mirror_settings.push_mirror.add=Добавить Push-зеркало settings.sync_mirror=Синхронизировать settings.mirror_sync_in_progress=Синхронизируются репозитории-зеркала. Подождите минуту и обновите страницу. 
settings.email_notifications.enable=Включить почтовые уведомления @@ -1526,6 +1645,7 @@ settings.email_notifications.disable=Отключить почтовые уве settings.email_notifications.submit=Установить настройки электронной почты settings.site=Сайт settings.update_settings=Обновить настройки +settings.branches.update_default_branch=Обновить ветку по умолчанию settings.advanced_settings=Расширенные настройки settings.wiki_desc=Включить Вики для репозитория settings.use_internal_wiki=Использовать встроенную вики-систему @@ -1555,6 +1675,7 @@ settings.pulls.allow_rebase_merge_commit=Разрешить rebase с явным settings.pulls.allow_squash_commits=Разрешить объединять коммиты перед слиянием (squash) settings.pulls.allow_manual_merge=Пометить PR как слитый вручную settings.pulls.enable_autodetect_manual_merge=Включить автоопределение ручного слияния (Примечание: в некоторых особых случаях могут возникнуть ошибки) +settings.pulls.default_delete_branch_after_merge=Удалить ветку Pull Request после слияния по умолчанию settings.projects_desc=Включить проекты репозитория settings.admin_settings=Настройки администратора settings.admin_enable_health_check=Выполнять проверки целостности этого репозитория (git fsck) @@ -1582,6 +1703,7 @@ settings.transfer_form_title=Введите сопутствующую инфо settings.transfer_in_progress=Трансфер в процессе выполнения. Отмените его, если желаете выполнить трансфер другому пользователю. settings.transfer_notices_1=- Вы можете потерять доступ, если новый владелец является отдельным пользователем. settings.transfer_notices_2=- Вы сохраните доступ, если новым владельцем станет организация, владельцем которой вы являетесь. +settings.transfer_notices_3=- если репозиторий является приватным и передается отдельному пользователю, это действие позволяет убедиться, что пользователь имеет хотя бы права на чтение (и при необходимости изменяет права доступа). settings.transfer_owner=Новый владелец settings.transfer_perform=Выполнить трансфер settings.transfer_started=Репозиторий ожидает подтверждения трансфера от "%s" @@ -1705,7 +1827,7 @@ settings.event_pull_request_review_desc=Запрос на слияние утв settings.event_pull_request_sync=Синхронизация Pull Request settings.event_pull_request_sync_desc=Запрос на слияние синхронизирован. settings.branch_filter=Фильтр веток -settings.branch_filter_desc=Белый список ветвей для событий Push, создания ветвей и удаления ветвей, указанных в виде глобуса. Если пусто или *, сообщается о событиях для всех филиалов. Смотрите github.com/gobwas/glob документацию по синтаксису. Примеры: master, {master,release*}. +settings.branch_filter_desc=Белый список ветвей для событий Push, создания ветвей и удаления ветвей, указанных в виде глоб-шаблона. Если пустой или *, то все событий для всех ветвей будут зарегистрированы. Перейдите по ссылке github.com/gobwas/glob на документацию по синтаксису. Примеры: master, {master,release*}. settings.active=Активный settings.active_helper=Информация о происходящих событиях будет отправляться на URL-адрес этого вебхука. settings.add_hook_success=Вебхук был добавлен. @@ -1775,7 +1897,7 @@ settings.dismiss_stale_approvals_desc=Когда новые коммиты, из settings.require_signed_commits=Требовать подписанные коммиты settings.require_signed_commits_desc=Отклонить push'ы в эту ветку, если они не подписаны или не проверены. 
settings.protect_protected_file_patterns=Защищённые шаблоны файлов (разделённые через '\;'): -settings.protect_protected_file_patterns_desc=Защищенные файлы, которые не могут быть изменены напрямую, даже если пользователь имеет право добавлять, редактировать или удалять файлы в этой ветке. Несколько шаблонов могут быть разделены точкой с запятой ('\;'). Смотрите github.com/gobwas/glob документацию для синтаксиса шаблонов. Например: .drone.yml, /docs/**/*.txt. +settings.protect_protected_file_patterns_desc=Защищенные файлы, которые не могут быть изменены напрямую, даже если пользователь имеет право добавлять, редактировать или удалять файлы в этой ветке. Шаблоны могут быть разделены точкой с запятой ('\;'). Смотрите github.com/gobwas/glob документацию для синтаксиса шаблонов. Например: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Включить защиту settings.delete_protected_branch=Отключить защиту settings.update_protect_branch_success=Настройки защиты ветки '%s' были успешно изменены. @@ -1789,10 +1911,21 @@ settings.block_on_official_review_requests_desc=Слияние невозмож settings.block_outdated_branch=Блокировать слияние, если pull request устарел settings.block_outdated_branch_desc=Слияние будет невозможно, если головная ветвь находится позади базовой ветви. settings.default_branch_desc=Главная ветка является "базовой" для вашего репозитория, на которую по умолчанию направлены все Pull Request'ы и которая является лицом вашего репозитория. Первое, что увидит посетитель — это содержимое главной ветки. Выберите её из уже существующих: +settings.default_merge_style_desc=Стиль слияния по умолчанию: settings.choose_branch=Выберите ветку… settings.no_protected_branch=Нет защищённых веток. settings.edit_protected_branch=Редактировать settings.protected_branch_required_approvals_min=Число необходимых одобрений не может быть отрицательным. +settings.tags=Теги +settings.tags.protection=Защита тегов +settings.tags.protection.pattern=Шаблон тегов +settings.tags.protection.allowed=Разрешено +settings.tags.protection.allowed.users=Разрешенные пользователи +settings.tags.protection.allowed.teams=Разрешенные команды +settings.tags.protection.allowed.noone=Ни один +settings.tags.protection.create=Защитить тег +settings.tags.protection.none=Нет защищенных тегов. +settings.tags.protection.pattern.description=Вы можете использовать одно имя или глоб-шаблон или регулярное выражение, для выбора нескольких тегов. Подробнее о защищенных тэгах. settings.bot_token=Токен для бота settings.chat_id=ID чата settings.matrix.homeserver_url=URL домашнего сервера @@ -1806,6 +1939,7 @@ settings.archive.success=Репозиторий был успешно архив settings.archive.error=Ошибка при попытке архивировать репозиторий. Смотрите логи для получения подробностей. settings.archive.error_ismirror=Вы не можете поместить зеркалируемый репозиторий в архив. settings.archive.branchsettings_unavailable=Настройки ветки недоступны, если репозиторий архивирован. +settings.archive.tagsettings_unavailable=Настройки тегов недоступны, если репозиторий архивирован. settings.unarchive.button=Разархивировать settings.unarchive.header=Разархивировать этот репозиторий settings.unarchive.text=Разархивация восстанавливает возможность совершать push в репозиторий, создавать новые коммиты, задачи и запросы на слияние. 
@@ -1857,6 +1991,7 @@ diff.whitespace_ignore_at_eol=Игнорировать изменения в п diff.stats_desc= %d изменённых файлов: %d добавлений и %d удалений diff.stats_desc_file=%d изменений: %d дополнений и %d удалений diff.bin=Двоичные данные +diff.bin_not_shown=Двоичный файл не отображается. diff.view_file=Просмотреть файл diff.file_before=До diff.file_after=После @@ -1864,6 +1999,7 @@ diff.file_image_width=Ширина diff.file_image_height=Высота diff.file_byte_size=Размер diff.file_suppressed=Разница между файлами не показана из-за своего большого размера +diff.file_suppressed_line_too_long=Различия файлов скрыты, потому что одна или несколько строк слишком длинны diff.too_many_files=Некоторые файлы не были показаны из-за большого количества измененных файлов diff.comment.placeholder=Оставить комментарий diff.comment.markdown_info=Поддерживается синтаксис Markdown. @@ -1891,6 +2027,7 @@ release.new_release=Новый релиз release.draft=Черновик release.prerelease=Пре-релиз release.stable=Стабильный +release.compare=Сравнить release.edit=Редактировать release.ahead.commits=%d коммиты release.ahead.target=%s с этого релиза @@ -1917,6 +2054,7 @@ release.deletion_tag_desc=Этот тег будет удалён из хран release.deletion_tag_success=Тег был удалён. release.tag_name_already_exist=Релиз с этим именем метки уже существует. release.tag_name_invalid=Имя тега является не допустимым. +release.tag_name_protected=Имя тега защищено. release.tag_already_exist=Этот тег уже используется. release.downloads=Загрузки release.download_count=Загрузки: %s @@ -1948,6 +2086,10 @@ branch.restore=Восстановить ветку '%s' branch.download=Скачать ветку '%s' branch.included_desc=Эта ветка является частью ветки по умолчанию branch.included=Включено +branch.create_new_branch=Создать ветку из ветви: +branch.confirm_create_branch=Создать ветку +branch.new_branch=Создать новую ветку +branch.new_branch_from=Создать новую ветку из '%s' tag.create_tag=Создать тег %s tag.create_success=Тег '%s' был создан. @@ -1957,6 +2099,9 @@ topic.done=Сохранить topic.count_prompt=Вы не можете выбрать более 25 тем topic.format_prompt=Темы должны начинаться с буквы или цифры, могут содержать дефисы(-) и должны содержать не более 35 символов. +error.csv.too_large=Не удается отобразить этот файл, потому что он слишком большой. +error.csv.unexpected=Не удается отобразить этот файл, потому что он содержит неожиданный символ в строке %d и столбце %d. +error.csv.invalid_field_count=Не удается отобразить этот файл, потому что он имеет неправильное количество полей в строке %d. [org] org_name_holder=Название организации @@ -1996,7 +2141,7 @@ settings.visibility.public=Публичный settings.visibility.limited=Ограничено (Видно только для авторизованных пользователей) settings.visibility.limited_shortname=Ограничить settings.visibility.private=Частный (Видимый только для участников организации) -settings.visibility.private_shortname=Приватизировать +settings.visibility.private_shortname=Приватный settings.update_settings=Обновить настройки settings.update_setting_success=Настройки организации обновлены. @@ -2154,6 +2299,8 @@ dashboard.total_gc_time=Итоговая задержка GC dashboard.total_gc_pause=Итоговая задержка GC dashboard.last_gc_pause=Последняя пауза сборщика мусора dashboard.gc_times=Количество сборок мусора +dashboard.delete_old_actions=Удалите все старые действия из базы данных +dashboard.delete_old_actions.started=Удалите все старые действия из запущенной базы данных. 
users.user_manage_panel=Панель управления пользователями users.new_account=Создать новый аккаунт @@ -2249,7 +2396,6 @@ auths.host=Сервер auths.port=Порт auths.bind_dn=Bind DN auths.bind_password=Привязать пароль -auths.bind_password_helper=Предупреждение: этот пароль хранится в виде простого текста. Используйте учетную запись только для чтения, если это возможно. auths.user_base=База для поиска пользователя auths.user_dn=DN пользователя auths.attribute_username=Атрибут Username @@ -2280,6 +2426,7 @@ auths.allowed_domains_helper=Оставьте пустым, чтобы разр auths.enable_tls=Включение шифрования TLS auths.skip_tls_verify=Пропустить проверку TLS auths.pam_service_name=Имя службы PAM +auths.pam_email_domain=Домен почты PAM (необязательно) auths.oauth2_provider=Поставщик OAuth2 auths.oauth2_icon_url=URL иконки auths.oauth2_clientID=ID клиента (ключ) @@ -2379,6 +2526,7 @@ config.db_path=Путь config.service_config=Сервисная конфигурация config.register_email_confirm=Требуется подтверждение по электронной почте config.disable_register=Отключить самостоятельную регистрацию +config.allow_only_internal_registration=Разрешить регистрацию только через Gitea config.allow_only_external_registration=Разрешить регистрацию только через сторонние сервисы config.enable_openid_signup=Включить cамостоятельную регистрацию OpenID config.enable_openid_signin=Включение входа через OpenID diff --git a/options/locale/locale_sr-SP.ini b/options/locale/locale_sr-SP.ini index 2274dc10409b..48f5d7e7983f 100644 --- a/options/locale/locale_sr-SP.ini +++ b/options/locale/locale_sr-SP.ini @@ -37,6 +37,7 @@ cancel=Откажи + [error] [startpage] @@ -86,12 +87,19 @@ has_unconfirmed_mail=Здраво, %s! Имате непотврђену адр resend_mail=Кликните овде да поново пошаљете писмо [mail] + activate_account=Молимо вас активирајте ваш налог + activate_email=Потврдите вашу адресу е-поште + + + + + [modal] yes=Да no=Не @@ -193,6 +201,7 @@ delete_account=Уклоните ваш налог confirm_delete_account=Потврдите брисање + [repo] owner=Власник repo_name=Име спремишта @@ -335,6 +344,7 @@ issues.num_participants=%d учесника issues.attachment.open_tab=`Кликните "%s" да видите у новом прозору` issues.attachment.download=`Кликните да преузмете "%s"` + pulls.new=Нови захтев за спајање pulls.filter_branch=Филтер по грани pulls.no_results=Нема резултата. diff --git a/options/locale/locale_sv-SE.ini b/options/locale/locale_sv-SE.ini index 9f3c1b23396b..a1c13f2e411e 100644 --- a/options/locale/locale_sv-SE.ini +++ b/options/locale/locale_sv-SE.ini @@ -89,6 +89,7 @@ loading=Laddar… error404=Sidan du försöker nå finns inte eller så har du inte behörighet att se den. + [error] occurred=Ett fel har inträffat report_message=Om du är säker på att detta är en Gitea bugg, vänligen sök efter ärende på GitHub och öppna nytt ärende om det behövs. @@ -246,6 +247,7 @@ register_helper_msg=Har du redan ett konto? Logga in nu! social_register_helper_msg=Har du redan ett konto? Länka det nu! disable_register_prompt=Registrering inaktiverad. Vänligen kontakta din sidadministratör. disable_register_mail=Bekräftelsemejl vid registrering är inaktiverad. +remember_me=Kom ihåg denna enhet forgot_password_title=Glömt lösenord forgot_password=Glömt lösenord? sign_up_now=Behöver du ett konto? Registrera nu. @@ -278,6 +280,7 @@ twofa_scratch_token_incorrect=Din skrapkod är ogiltlig. 
login_userpass=Logga in login_openid=OpenID oauth_signup_tab=Skapa nytt konto +oauth_signup_title=Slutför nytt konto oauth_signup_submit=Slutför kontot oauth_signin_tab=Länka till befintligt konto oauth_signin_title=Logga in för att godkänna länkat konto @@ -288,7 +291,6 @@ openid_connect_desc=Vald OpenID URI är okänd. Associera den med ett nytt konto openid_register_title=Skapa nytt konto openid_register_desc=Vald OpenID URI är okänd. Associera den med ett nytt konto här. openid_signin_desc=Ange din OpenID URI. Exempelvis: https://anne.me, bob.openid.org.cn eller gnusocial.net/carry. -disable_forgot_password_mail=Kontoåterställning är inaktiverat. Vänligen kontakta din webbplatsadministratör. email_domain_blacklisted=Du kan inte registrera dig med din e-postadress. authorize_application=Godkänn applikation authorize_redirect_notice=Du kommer att omdirigeras till %s om du auktoriserar denna applikation. @@ -302,11 +304,18 @@ password_pwned=Lösenordet du valde finns på en spegel @@ -851,6 +859,7 @@ editor.file_already_exists=En fil vid namn '%s' finns redan i denna utvecklingsk editor.commit_empty_file_header=Committa en tom fil editor.commit_empty_file_text=Filen du vill committa är tom. Vill du fortsätta? editor.no_changes_to_show=Det finns inga ändringar att visa. +editor.fail_to_update_file_summary=Felmeddelande: editor.push_rejected_no_message=Ändringarna avvisades av servern utan något meddelande. Kontrollera githookarna. editor.add_subdir=Lägga till en katalog… editor.unable_to_upload_files=Uppladdning av filen '%s' misslyckades med felet: %v @@ -891,9 +900,13 @@ projects.deletion_success=Projektet har tagits bort. projects.edit=Redigera projekt projects.modify=Uppdatera projekt projects.edit_success=Projektet '%s' har uppdaterats. +projects.type.none=Ingen projects.template.desc=Projektmall projects.type.uncategorized=Okatergoriserad +projects.board.edit=Redigera tavla projects.board.new_submit=Skicka +projects.board.new=Ny tavla +projects.board.delete=Ta bort tavla projects.open=Öppna projects.close=Stäng @@ -1148,6 +1161,7 @@ issues.review.resolve_conversation=Lös konversation issues.review.resolved_by=markerade denna konversation som löst issues.assignee.error=Inte alla tilldelade har lagts till på grund av ett oväntat fel. + pulls.desc=Aktivera pull-förfrågningar och kodgranskning. pulls.new=Ny Pull-Förfrågan pulls.compare_changes=Ny Pull-Request @@ -1172,7 +1186,6 @@ pulls.merged_as=Pull-förfrågan har sammanfogats som Börja titeln med %s för att förhindra att pull-förfrågan sammanfogas av misstag` -pulls.cannot_merge_work_in_progress=Denna pull-förfrågan är markerad som ett pågående arbete. Ta bort prefixet %s från titeln när den är klar pulls.data_broken=Pull-requesten är trasig pågrund av oexisterande information on forken. pulls.files_conflicted=Den här pull-förfrågan ha ändringar som är i konflikt med mål-branchen. pulls.is_checking=Merge-konfliktkontroll pågår. Försök igen senare. @@ -1904,7 +1917,6 @@ auths.host=Värd auths.port=Port auths.bind_dn=Bind DN auths.bind_password=Bind Lösenord -auths.bind_password_helper=Varning: Detta lösenord lagras i klartext. Använd ett konto med endast läsrättigheter om möjligt. auths.user_base=Användarsökbas auths.user_dn=Användarnas DN auths.attribute_username=Användarnamnsattribut @@ -2117,6 +2129,7 @@ notices.delete_selected=Ta Bort Markerade notices.delete_all=Ta Bort Alla Notiser notices.type=Typ notices.type_1=Utvecklingskatalog +notices.type_2=Uppgift notices.desc=Beskrivning notices.op=Op. 
notices.delete_success=Systemnotifikationer har blivit raderade. @@ -2135,6 +2148,7 @@ merge_pull_request=`sammanslog pull-request %s#%[2]s` transfer_repo=överförde utvecklingskalatogen %s till %s delete_tag=tog bort taggen %[2]s från %[3]s delete_branch=tog bort branchen %[2]s from %[3]s +compare_branch=Jämför compare_commits=Jämför %d commits compare_commits_general=Jämför commits mirror_sync_create=synkade ny referens %[2]s till %[3]s från spegel diff --git a/options/locale/locale_tr-TR.ini b/options/locale/locale_tr-TR.ini index 5e8decd80294..69ac3367c185 100644 --- a/options/locale/locale_tr-TR.ini +++ b/options/locale/locale_tr-TR.ini @@ -91,8 +91,11 @@ loading=Yükleniyor… step1=1. Adım: step2=2. Adım: +error=Hata error404=Ulaşmaya çalıştığınız sayfa mevcut değil veya görüntüleme yetkiniz yok. +never=Asla + [error] occurred=Bir hata oluştu report_message=Bunun bir Gitea hatası olduğundan eminseniz, lütfen GitHub 'da sorunu arayın ve gerekirse yeni bir sorun açın. @@ -299,7 +302,6 @@ openid_connect_desc=Seçilen OpenID URI'si bilinmiyor. Burada yeni bir hesapla i openid_register_title=Yeni hesap oluştur openid_register_desc=Seçilen OpenID URI'si bilinmiyor. Burada yeni bir hesapla ilişkilendir. openid_signin_desc=OpenID URI'nızı girin. Örneğin: https://anne.me, bob.openid.org.cn veya gnusocial.net/carry. -disable_forgot_password_mail=Hesap kurtarma devre dışı. Lütfen site yöneticinizle iletişime geçin. email_domain_blacklisted=Bu e-posta adresinizle kayıt olamazsınız. authorize_application=Uygulamayı Yetkilendir authorize_redirect_notice=Bu uygulamayı yetkilendirirseniz %s adresine yönlendirileceksiniz. @@ -313,11 +315,18 @@ password_pwned=Seçtiğiniz parola, daha önce herkese açık veri ihlallerinde password_pwned_err=HaveIBeenPwned'e yapılan istek tamamlanamadı [mail] + activate_account=Lütfen hesabınızı aktifleştirin + activate_email=E-posta adresinizi doğrulayın + +register_notify=Gitea'ya Hoş Geldiniz + reset_password=Hesabınızı kurtarın + register_success=Kayıt başarılı -register_notify=Gitea'ya Hoş Geldiniz + + release.new.subject=%s içinden %s bırakıldı @@ -542,7 +551,6 @@ ssh_key_been_used=Bu SSH anahtarı, sunucuya zaten eklenmiş. ssh_key_name_used=Hesabınızda aynı ada sahip bir SSH anahtarı zaten var. ssh_principal_been_used=Bu sorumlu sunucuya zaten eklendi. gpg_key_id_used=Aynı kimliğe sahip bir açık GPG anahtarı zaten var. -gpg_no_key_email_found=Bu GPG anahtarı, hesabınızla ilişkili hiçbir e-posta adresiyle kullanılamaz. subkeys=Alt anahtarlar key_id=Anahtar Kimliği key_name=Anahtar İsmi @@ -673,6 +681,7 @@ email_notifications.onmention=Sadece Bahsedilen E-posta email_notifications.disable=E-posta Bildirimlerini Devre Dışı Bırak email_notifications.submit=E-posta Tercihlerini Ayarla + [repo] new_repo_helper=Bir depo, revizyon geçmişi dahil tüm proje dosyalarını içerir. Zaten başka bir yerde mi var? Depoyu taşıyın. owner=Sahibi @@ -723,7 +732,7 @@ mirror_prune_desc=Kullanılmayan uzak depoları izleyen referansları kaldır mirror_interval=Yansı Aralığı (geçerli zaman birimleri 'h', 'm', 's'). 0 otomatik senkronizasyonu devre dışı bırakmak için. mirror_interval_invalid=Yansı süre aralığı geçerli değil. mirror_address=URL'den Klonla -mirror_address_desc=Gerekli kimlikleri Yetkilendirmeyi Klonla bölümüne girin. +mirror_address_desc=Yetkilendirme bölümüne gerekli tüm kimlik bilgilerini girin. mirror_address_url_invalid=Sağlanan Url geçersiz. Url'nin tüm bileşenlerinden doğru olarak kaçmalısınız. mirror_address_protocol_invalid=Sağlanan url geçersiz. 
Yalnızca http(s):// veya git:// konumları yansıtılabilir. mirror_lfs=Büyük Dosya Depolama (LFS) @@ -731,6 +740,9 @@ mirror_lfs_desc=LFS verisinin yansılamasını etkinleştir. mirror_lfs_endpoint=LFS Uç Noktası mirror_lfs_endpoint_desc=Senkronizasyon, LFS sunucusunu belirlemek için klonlama url'sini kullanmaya çalışacak. Eğer LFS veri deposu başka yerdeyse özel bir uç nokta da belirtebilirsiniz. mirror_last_synced=Son Senkronize Edilen +mirror_password_placeholder=(Değiştirilmedi) +mirror_password_blank_placeholder=(Ayarı kaldır) +mirror_password_help=Saklanan bir parolayı silmek için kullanıcı adını değiştirin. watchers=İzleyenler stargazers=Yıldızlayanlar forks=Çatallamalar @@ -783,7 +795,7 @@ form.reach_limit_of_creation_n=Zaten %d depo limitinize ulaştınız. form.name_reserved=Depo ismi '%s' rezerve edildi. form.name_pattern_not_allowed='%s' deseni, depo adı için geçerli değildir. -need_auth=Yetkilendirmeyi Klonla +need_auth=Yetkilendirme migrate_options=Göç Seçenekleri migrate_service=Göç Hizmeti migrate_options_mirror_helper=Bu depo bir yansı olacaktır @@ -854,6 +866,7 @@ branch=Dal tree=Ağaç clear_ref='Geçerli referansı temizle' filter_branch_and_tag=Dal veya biçim imini filtrele +find_tag=Etiketi bul branches=Dal tags=Etiket issues=Konular @@ -1218,6 +1231,7 @@ issues.error_modifying_due_date=Bitiş tarihi değiştirilemedi. issues.error_removing_due_date=Bitiş tarihi silinemedi. issues.push_commit_1=%d işlemeyi %s ekledi issues.push_commits_n=%d işlemeyi %s ekledi +issues.force_push_codes=`%[1]s %[2]s hedefinden %[4]s hedefine zorla gönderildi %[6]s` issues.due_date_form=yyyy-aa-gg issues.due_date_form_add=Bitiş tarihi ekle issues.due_date_form_edit=Düzenle @@ -1283,6 +1297,9 @@ issues.review.resolved_by=bu konuşmayı çözümlenmiş olarak işaretledi issues.assignee.error=Beklenmeyen bir hata nedeniyle tüm atananlar eklenmedi. issues.reference_issue.body=Gövde +compare.compare_base=temel +compare.compare_head=karşılaştır + pulls.desc=Değişiklik isteklerini ve kod incelemelerini etkinleştir. pulls.new=Yeni Değişiklik İsteği pulls.compare_changes=Yeni Değişiklik İsteği @@ -1310,7 +1327,10 @@ pulls.manually_merged_as=Değişiklik isteği başlığı %s ile başlatın` -pulls.cannot_merge_work_in_progress=Bu değişiklik isteği devam eden bir çalışma olarak işaretlendi. Hazır olduğunda %s ön ekini başlıktan kaldırın +pulls.cannot_merge_work_in_progress=Bu değişiklik isteği, devam eden bir çalışma olarak işaretlendi. +pulls.still_in_progress=Hala devam ediyor mu? +pulls.add_prefix=%s ön ekini ekle +pulls.remove_prefix=%s ön ekini kaldır pulls.data_broken=Bu değişiklik isteği, çatallama bilgilerinin eksik olması nedeniyle bozuldu. pulls.files_conflicted=Bu değişiklik isteğinde, hedef dalla çakışan değişiklikler var. pulls.is_checking=Birleştirme çakışması denetimi devam ediyor. Birkaç dakika sonra tekrar deneyin. @@ -1536,6 +1556,15 @@ settings.hooks=Web İstemcileri settings.githooks=Git İstekleri settings.basic_settings=Temel Ayarlar settings.mirror_settings=Yansıma Ayarları +settings.mirror_settings.docs=Projenizi, değişiklikleri başka bir depoya/depodan otomatik olarak gönderecek ve/veya çekecek şekilde ayarlayın. Dallar, etiketler ve işlemeler otomatik olarak senkronize edilecektir. Depoları nasıl yansıtrım? 
+settings.mirror_settings.mirrored_repository=Yansıtılmış depo +settings.mirror_settings.direction=Yön +settings.mirror_settings.direction.pull=Çek +settings.mirror_settings.direction.push=Gönder +settings.mirror_settings.last_update=Son güncelleme +settings.mirror_settings.push_mirror.none=Yapılandırılmış yansı gönderimi yok +settings.mirror_settings.push_mirror.remote_url=Git Uzak Depo URL'si +settings.mirror_settings.push_mirror.add=Yansı Gönderimi Ekle settings.sync_mirror=Şimdi Eşitle settings.mirror_sync_in_progress=Yansı senkronizasyonu devam ediyor. Bir dakika sonra tekrar kontrol edin. settings.email_notifications.enable=E-posta Bildirimlerini Etkinleştir @@ -1544,6 +1573,7 @@ settings.email_notifications.disable=E-posta Bildirimlerini Devre Dışı Bırak settings.email_notifications.submit=E-posta Tercihlerini Ayarla settings.site=Web Sitesi settings.update_settings=Ayarları Güncelle +settings.branches.update_default_branch=Varsayılan Dalı Değiştir settings.advanced_settings=Gelişmiş Ayarlar settings.wiki_desc=Depo Wiki'sini Etkinkleştir settings.use_internal_wiki=Dahili Wiki Kullan @@ -1600,6 +1630,7 @@ settings.transfer_form_title=Onaylamak için depo adını girin: settings.transfer_in_progress=Şu anda devam etmekte olan bir aktarım mevcut. Eğer bu depoyu başka bir kullanıcıya aktarmak istiyorsanız mevcut aktarımı iptal edin. settings.transfer_notices_1=- Bireysel bir kullanıcıya aktarırsanız depoya erişiminizi kaybedersiniz. settings.transfer_notices_2=- Sahip (-yardımcı) olduğunuz bir organizasyona devrederseniz, depoya erişmeye devam edersiniz. +settings.transfer_notices_3=- Depo özelse ve bireysel bir kullanıcıya aktarılmışsa, bu eylem kullanıcının en azından okuma iznine sahip olmasını sağlar (ve gerekirse izinleri değiştirir). settings.transfer_owner=Yeni Sahip settings.transfer_perform=Aktarımı Gerçekleştir settings.transfer_started=Bu depo aktarılmak üzere işaretlendi ve "%s" tarafından onay bekliyor @@ -1723,7 +1754,6 @@ settings.event_pull_request_review_desc=Değişiklik isteği onaylandı, reddedi settings.event_pull_request_sync=Değişiklik İsteği Senkronize Edildi settings.event_pull_request_sync_desc=Değişiklik isteği senkronize edildi. settings.branch_filter=Dal filtresi -settings.branch_filter_desc=Glob deseni olarak belirtilen itme, dal oluşturma ve dal silme olayları için dal beyaz listesi. Boş veya * ise, tüm dallar için olaylar bildirilir. Sözdizimi için github.com/gobwas/glob belgelerine bakın. Örnekler: master, {master,release*}. settings.active=Etkin settings.active_helper=Tetiklenen olaylar hakkındaki bilgiler bu web isteği URL'sine gönderilir. settings.add_hook_success=Web isteği eklendi. @@ -1763,7 +1793,7 @@ settings.protected_branch_can_push_yes=İtebilirsiniz settings.protected_branch_can_push_no=İtemezsiniz settings.branch_protection=%s dalı için Dal Koruması settings.protect_this_branch=Dal Korumayı Etkinleştir -settings.protect_this_branch_desc=Silmeyi önler ve dala Git itmesini ve birleştirmesini kısıtlar. +settings.protect_this_branch_desc=Silmeyi önler ve dala Git gönderimini ve birleştirmesini kısıtlar. settings.protect_disable_push=İtmeyi Devre Dışı Bırak settings.protect_disable_push_desc=Bu dala itme yapılmasına izin verilmeyecek. settings.protect_enable_push=İtmeyi Etkinleştir @@ -1793,7 +1823,6 @@ settings.dismiss_stale_approvals_desc=Değişiklik isteğinin içeriğini deği settings.require_signed_commits=İmzalı İşleme Gerekli settings.require_signed_commits_desc=Reddetme, onlar imzasızsa veya doğrulanamazsa bu dala gönderir. 
settings.protect_protected_file_patterns=Korumalı dosya kalıpları (noktalı virgülle ayrılmış '\;'): -settings.protect_protected_file_patterns_desc=Kullanıcı bu dalda dosya ekleme, düzenleme veya silme haklarına sahip olsa bile doğrudan değiştirilmesine izin verilmeyen korumalı dosyalar. Birden çok desen noktalı virgül ('\;') kullanılarak ayrılabilir. Desen sözdizimi belgeleri için github.com/gobwas/glob adresine bakın. Örnekler: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Korumayı etkinleştir settings.delete_protected_branch=Korumayı devre dışı bırak settings.update_protect_branch_success='%s' dalı için dal koruması güncellendi. @@ -1884,6 +1913,7 @@ diff.file_image_width=Genişlik diff.file_image_height=Yükseklik diff.file_byte_size=Boyut diff.file_suppressed=Dosya farkı çok büyük olduğundan ihmal edildi +diff.file_suppressed_line_too_long=Dosya farkları bir veya daha fazla satır çok uzun olduğundan bastırıldı diff.too_many_files=Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor diff.comment.placeholder=Yorum Yap diff.comment.markdown_info=Markdown ile şekillendirme desteklenir. @@ -1911,6 +1941,7 @@ release.new_release=Yeni Sürüm release.draft=Taslak release.prerelease=Ön Sürüm release.stable=Kararlı +release.compare=Karşılaştır release.edit=düzenle release.ahead.commits=%d işleme release.ahead.target=bu sürümden bu yana %s dalına gönderildi @@ -1968,6 +1999,10 @@ branch.restore='%s' Dalını Geri Yükle branch.download='%s' Dalını İndir branch.included_desc=Bu dal varsayılan dalın bir parçasıdır branch.included=Dahil +branch.create_new_branch=Şu daldan dal oluştur: +branch.confirm_create_branch=Dal oluştur +branch.new_branch=Yeni dal oluştur +branch.new_branch_from='%s' dalından yeni dal oluştur tag.create_tag=%s etiketi oluştur tag.create_success='%s' etiketi oluşturuldu. @@ -2177,6 +2212,8 @@ dashboard.total_gc_time=Toplam GC Durması dashboard.total_gc_pause=Toplam GC Durması dashboard.last_gc_pause=Son GC Durması dashboard.gc_times=GC Zamanları +dashboard.delete_old_actions=Veritabanından tüm eski eylemleri sil +dashboard.delete_old_actions.started=Veritabanından başlatılan tüm eski eylemleri silin. users.user_manage_panel=Kullanıcı Hesap Yönetimi users.new_account=Yeni Kullanıcı Hesabı @@ -2272,7 +2309,6 @@ auths.host=Sunucu auths.port=Bağlantı Noktası auths.bind_dn=Bağlama DN'i auths.bind_password=Bağlama Parolası -auths.bind_password_helper=Uyarı: Bu parola düz metin olarak saklanır. Mümkünse salt okunur bir hesap kullanın. auths.user_base=Kullanıcı Arama Tabanı auths.user_dn=Kullanıcı DN'i auths.attribute_username=Kullanıcı Adı Özelliği @@ -2303,6 +2339,7 @@ auths.allowed_domains_helper=Tüm alanlara izin vermek için boş bırakın. 
Bir auths.enable_tls=TLS Şifrelemeyi Aktifleştir auths.skip_tls_verify=TLS Doğrulamasını Atla auths.pam_service_name=PAM Servis Adı +auths.pam_email_domain=PAM E-posta Alan adı (tercihen) auths.oauth2_provider=OAuth2 Sağlayıcısı auths.oauth2_icon_url=Simge URL'si auths.oauth2_clientID=İstemci Kimliği (Anahtar) @@ -2402,6 +2439,7 @@ config.db_path=Yol config.service_config=Servis Yapılandırması config.register_email_confirm=Kayıt Olmak İçin E-posta Onayı Gereksin config.disable_register=Kullanıcı Kaydını Devre Dışı Bırak +config.allow_only_internal_registration=Kayda Sadece Gitea'nın Kendisi Üzerinden İzin Ver config.allow_only_external_registration=Sadece Dış Hizmetler Aracılığıyla Kullanıcı Kaydına İzin Ver config.enable_openid_signup=OpenID Kendinden Kaydı'nı Etkinleştir config.enable_openid_signin=OpenID Oturum Açmayı Etkinleştiriniz diff --git a/options/locale/locale_uk-UA.ini b/options/locale/locale_uk-UA.ini index 8ce795613449..a2a789ee77d2 100644 --- a/options/locale/locale_uk-UA.ini +++ b/options/locale/locale_uk-UA.ini @@ -15,16 +15,19 @@ page=Сторінка template=Шаблон language=Мова notifications=Сповіщення +active_stopwatch=Трекер робочого часу create_new=Створити… user_profile_and_more=Профіль і налаштування… signed_in_as=Увійшов як enable_javascript=Цей веб-сайт працює краще з JavaScript. toc=Зміст licenses=Ліцензії +return_to_gitea=Повернутися до Gitea username=Ім'я кристувача email=Адреса електронної пошти password=Пароль +access_token=Токен Доступу re_type=Введіть пароль ще раз captcha=CAPTCHA twofa=Двофакторна авторизація @@ -73,20 +76,27 @@ pull_requests=Запити на злиття issues=Проблеми milestones=Етапи +ok=OK cancel=Відмінити save=Зберегти add=Додати add_all=Додати все remove=Видалити remove_all=Видалити все +edit=Редагувати write=Писати preview=Попередній перегляд loading=Завантаження… +step1=Крок 1: +step2=Крок 2: +error=Помилка error404=Сторінка, до якої ви намагаєтеся звернутися або до , не існує або Ви не маєте права на її перегляд. +never=Ніколи + [error] occurred=Сталася помилка report_message=Якщо ви впевнені, що це помилка Gitea, будь ласка, спробуйте відшукати відповідну проблему на GitHub та за відсутності створіть нову. @@ -199,6 +209,8 @@ default_enable_timetracking=Увімкнути відстеження часу default_enable_timetracking_popup=Включити відстеження часу для нових репозиторіїв за замовчуванням. no_reply_address=Прихований поштовий домен no_reply_address_helper=Доменне ім'я для користувачів із прихованою електронною адресою. Наприклад, ім'я користувача 'joe' буде входити в Git як 'joe@noreply.example.org', якщо для прихованого домену електронної пошти встановлено 'noreply.example.org'. +password_algorithm=Алгоритм хешування пароля +password_algorithm_helper=Встановіть алгоритм хешування пароля. Алгоритми мають різні вимоги та силу. `argon2` незважаючи на хороші характеристики використовує багато пам'яті і може бути недоцільним для малих систем. [home] uname_holder=Ім'я користувача або Ел. пошта @@ -212,6 +224,7 @@ my_mirrors=Мої дзеркала view_home=Переглянути %s search_repos=Шукати репозиторій… filter=Інші фільтри +filter_by_team_repositories=Фільтрувати за репозиторіями команд show_archived=Архівовані show_both_archived_unarchived=Показано архівовані і не архівовані @@ -231,6 +244,8 @@ users=Користувачі organizations=Організації search=Пошук code=Код +search.fuzzy=Неточний +search.match=Відповідність repo_no_results=Відповідних репозиторіїв не знайдено. user_no_results=Відповідних користувачів не знайдено. org_no_results=Відповідних організацій не знайдено. 
@@ -244,6 +259,7 @@ register_helper_msg=Вже зареєстровані? Увійдіть зара social_register_helper_msg=Вже є аккаунт? Зв'яжіть його зараз! disable_register_prompt=Вибачте, можливість реєстрації відключена. Будь ласка, зв'яжіться з адміністратором сайту. disable_register_mail=Підтвердження реєстрації електронною поштою вимкнено. +remember_me=Запам’ятати цей пристрій forgot_password_title=Забув пароль forgot_password=Забули пароль? sign_up_now=Потрібен обліковий запис? Зареєструйтеся зараз. @@ -276,6 +292,7 @@ twofa_scratch_token_incorrect=Невірний одноразовий парол login_userpass=Увійти login_openid=OpenID oauth_signup_tab=Зареєструвати обліковий запис +oauth_signup_title=Повний новий обліковий запис oauth_signup_submit=Повний обліковий запис oauth_signin_tab=Посилання на існуючий обліковий запис oauth_signin_title=Увійдіть щоб авторизувати пов'язаний обліковий запис @@ -286,7 +303,8 @@ openid_connect_desc=Вибраний OpenID URI невідомий. Пов'яж openid_register_title=Створити новий обліковий запис openid_register_desc=Вибраний OpenID URI невідомий. Пов'яжіть йогоз новим обліковим записом тут. openid_signin_desc=Введіть свій ідентифікатор OpenID. Наприклад: https://anne.me, bob.openid.org.cn або gnusocial.net/carry. -disable_forgot_password_mail=Відновлення облікового запису вимкнено. Зверніться до адміністратора сайту. +disable_forgot_password_mail=Відновлення облікового запису вимкнено, оскільки не налаштована електронна пошта. Будь ласка, зв'яжіться з адміністратором сайту. +disable_forgot_password_mail_admin=Відновлення облікового запису доступне лише після налаштування електронної пошти. Будь ласка, налаштуйте ел. пошту для відновлення облікового запису. email_domain_blacklisted=З вказаним email реєстрація неможлива. authorize_application=Авторизувати програму authorize_redirect_notice=Вас буде переадресовано до %s, якщо ви авторизуєте цю програму. @@ -296,16 +314,65 @@ authorize_title=Авторизуйвати "%s" для доступу до ва authorization_failed=Помилка авторизації authorization_failed_desc=Авторизація не вдалася, оскільки ми виявили недійсний запит. Зверніться до супровідника програми, яку ви намагалися авторизувати. sspi_auth_failed=Помилка SSPI-автентифікації +password_pwned=Вибраний вами пароль знаходиться в списку вкрадених паролів раніше викритих у витоках публічних даних. Будь ласка, спробуйте ще раз з іншим паролем. +password_pwned_err=Не вдалося виконати запит до HaveIBeenPwed [mail] +view_it_on=Переглянути на %s +link_not_working_do_paste=Не працює? Спробуйте скопіювати та вставити його в свій браузер. +hi_user_x=Привіт %s, + activate_account=Будь ласка, активуйте ваш обліковий запис +activate_account.title=%s, будь ласка, активуйте свій обліковий запис + activate_email=Підтвердить вашу адресу електронної пошти -reset_password=Відновлення вашого облікового запису -register_success=Реєстрація успішна +activate_email.title=%s, будь ласка, підтвердіть вашу адресу електронної пошти +activate_email.text=Перейдіть за цим посиланням, щоб підтвердити вашу електронну адресу в %s: + register_notify=Ласкаво просимо у Gitea +register_notify.title=%[1]s, ласкаво просимо до %[2]s +register_notify.text_1=це ваша е-пошта для підтвердження реєстрації для %s! +register_notify.text_2=Тепер ви можете увійти як: %s. +register_notify.text_3=Якщо цей обліковий запис було створено для вас, будь ласка, спочатку встановіть свій пароль. 
+reset_password=Відновлення вашого облікового запису +reset_password.title=%s, ви відправили запит на відновлення облікового запису +reset_password.text=Перейдіть за цим посиланням, щоб відновити ваш обліковий запис в %s: +register_success=Реєстрація успішна +issue_assigned.pull=@%[1]s призначив вам запит на злиття %[2]s в репозиторії %[3]s. +issue_assigned.issue=@%[1]s призначив вам проблему %[2]s у репозиторії %[3]s. + +issue.x_mentioned_you=@%s згадав вас: +issue.action.force_push=%[1]s force-pushed %[2]s з %[3]s в %[4]s. +issue.action.push_n=@%[1]s відправив %[3]d коміти до %[2]s +issue.action.close=@%[1]s закрито #%[2]d. +issue.action.reopen=@%[1] заново відкрив #%[2]d. +issue.action.merge=@%[1]s об'єднав #%[2]d до %[3]s. +issue.action.approve=@%[1]s затвердили цей запит на злиття. +issue.action.reject=@%[1]s запитують зміни на цей запит на злиття. +issue.action.review=@%[1]s прокоментували цей запит на злиття. +issue.action.review_dismissed=@%[1]s відхилено останній відгук від %[2]s для цього запиту на злиття. +issue.action.ready_for_review=@%[1]s позначили цей запит на злиття як готовий до розгляду. +issue.action.new=@%[1]s створили #%[2]d. +issue.in_tree_path=В %s: + +release.new.subject=%s в %s випущено +release.new.text=@%[1]s випустив %[2]s в %[3]s +release.title=Назва: %s +release.note=Примітка: +release.downloads=Звантаження: +release.download.zip=Вихідний код (ZIP) +release.download.targz=Вихідний код (TAR.GZ) + +repo.transfer.subject_to=%s бажає передати"%s" в %s +repo.transfer.subject_to_you=%s бажає передати"%s" вам +repo.transfer.to_you=вам +repo.transfer.body=Щоб прийняти або відхилити перейдіть до %s або просто ігноруйте. + +repo.collaborator.added.subject=%s додав вас до %s +repo.collaborator.added.text=Ви були додані в якості співавтора репозиторію: [modal] yes=Так @@ -346,19 +413,26 @@ email_error=` не є адресою електронної пошти.` url_error=` не є припустимою URL-Адресою.` include_error=`повинен бути текст '%s'` glob_pattern_error=` неприпустимий шаблон glob: %s.` +regex_pattern_error=` неприпустимий шаблон regex: %s.` unknown_error=Невідома помилка: captcha_incorrect=Код CAPTCHA неправильний. password_not_match=Паролі не співпадають. lang_select_error=Оберіть мову з переліку. username_been_taken=Ім'я користувача вже зайнято. +username_change_not_local_user=Нелокальні користувачі не можуть змінити своє ім'я користувача. repo_name_been_taken=Ім'я репозіторію вже використовується. +repository_files_already_exist=Файли вже існують для цього репозитарію. Зверніться до системного адміністратора. +repository_files_already_exist.adopt=Файли вже існують для цього репозиторію і можуть бути лише прийняті. +repository_files_already_exist.delete=Файли вже існують для цього сховища. Ви повинні видалити їх. +repository_files_already_exist.adopt_or_delete=Файли вже існують для цього репозиторію. Їх можливо прийняти або видалити. visit_rate_limit=Обмеження швидкості віддаленого доступу. 2fa_auth_required=Для віддаленого доступу необхідна двуфакторна аутентифікація. org_name_been_taken=Назва організації вже зайнято. team_name_been_taken=Назва команди вже зайнято. team_no_units_error=Дозволити доступ до принаймні одного розділу репозитарію. email_been_used=Ця електронна адреса вже використовується. +email_invalid=Адреса електронної пошти помилкова. openid_been_used=OpenID адреса '%s' вже використовується. username_password_incorrect=Неправильне ім'я користувача або пароль. 
password_complexity=Пароль не відповідає вимогам до складності: @@ -367,14 +441,17 @@ password_uppercase_one=Принаймні одна буква в верхньо password_digit_one=Принаймні одна цифра password_special_one=Принаймні один спеціальний символ (пунктуація, дужки, лапки тощо) enterred_invalid_repo_name=Невірно введено ім'я репозиторію. +enterred_invalid_org_name=Невірно введено ім'я організації. enterred_invalid_owner_name=Ім'я нового власника не є дійсним. enterred_invalid_password=Введений вами пароль некоректний. user_not_exist=Даний користувач не існує. team_not_exist=Команда не існує. +last_org_owner=Ви не можете видалити останнього користувача з команди 'власники'. У кожній команді має бути принаймні один власник. cannot_add_org_to_team=Організацію неможливо додати як учасника команди. invalid_ssh_key=Неможливо перевірити ваш SSH ключ: %s invalid_gpg_key=Неможливо перевірити ваш GPG ключ: %s +invalid_ssh_principal=Некоректний відповідальний: %s unable_verify_ssh_key=Не вдається підтвердити ключ SSH; подвійно перевірте його на наявність похибки. auth_failed=Помилка автентифікації: %v @@ -391,6 +468,7 @@ repositories=Репозиторії activity=Публічна активність followers=Читачі starred=Обрані Репозиторії +watched=Відстежувані репозиторії projects=Проекти following=Читає follow=Підписатися @@ -422,6 +500,7 @@ uid=Ідентифікатор Uid u2f=Ключі безпеки public_profile=Загальнодоступний профіль +biography_placeholder=Розкажіть трохи про себе profile_desc=Ваша адреса електронної пошти використовуватиметься для сповіщення та інших операцій. password_username_disabled=Нелокальним користувачам заборонено змінювати ім'я користувача. Щоб отримати докладнішу інформацію, зв'яжіться з адміністратором сайту. full_name=Повне ім'я @@ -429,9 +508,11 @@ website=Веб-сайт location=Місцезнаходження update_theme=Оновити тему update_profile=Оновити профіль +update_language_not_found=Мова '%s' недоступна. update_profile_success=Профіль успішно оновлено. change_username=Ваше Ім'я кристувача було змінено. change_username_prompt=Примітка. Зміни в імені також змінюють URL-адресу облікового запису. +change_username_redirect_prompt=Старе ім'я користувача буде перенаправлено до тих пір, поки воно не буде затверджено. continue=Продовжити cancel=Відмінити language=Мова @@ -492,31 +573,41 @@ keep_email_private_popup=Вашу адресу електронної пошти openid_desc=OpenID дозволяє делегувати аутентифікацію зовнішньому постачальнику послуг. manage_ssh_keys=Керувати SSH ключами +manage_ssh_principals=Управління SSH сертифікатами користувачів manage_gpg_keys=Керувати GPG ключами add_key=Додати ключ ssh_desc=Ці відкриті SSH-ключі пов'язані з вашим обліковим записом. Відповідні приватні ключі дозволяють отримати повний доступ до ваших репозиторіїв. +principal_desc=Ці настройки SSH сертифікатів вказані у вашому обліковому записі та надають повний доступ до ваших репозиторіїв. gpg_desc=Ці публічні ключі GPG пов'язані з вашим обліковим записом. Тримайте свої приватні ключі в безпеці, оскільки вони дозволяють здійснювати перевірку комітів. ssh_helper=Потрібна допомога? Дивіться гід на GitHub з генерації ключів SSH або виправлення типових неполадок SSH. gpg_helper= Потрібна допомога? Перегляньте посібник GitHub про GPG . 
add_new_key=Додати SSH ключ add_new_gpg_key=Додати GPG ключ +key_content_ssh_placeholder=Починається з 'ssh-ed25519', 'ssh-rsa', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'sk-ecdsa-sha2-nistp256@openssh.com', або 'sk-ssh-ed25519@openssh.com' key_content_gpg_placeholder=Починається з '-----BEGIN PGP PUBLIC KEY BLOCK-----' +add_new_principal=Додати користувача ssh_key_been_used=Цей SSH ключ вже був додано до сервера. +ssh_key_name_used=Ключ SSH з таким ім'ям вже існує у вашому обліковому записі. +ssh_principal_been_used=Цей користувач вже був доданий на сервер. gpg_key_id_used=Публічний ключ GPG з таким самим ідентифікатором вже існує. -gpg_no_key_email_found=Цей ключ GPG непридатний для використання з будь-якою електронною адресою, що пов'язана з вашим обліковим записом. subkeys=Підключі key_id=ID ключа key_name=Ім'я ключа key_content=Зміст +principal_content=Зміст add_key_success=SSH ключ '%s' додано. add_gpg_key_success=GPG ключ '%s' додано. +add_principal_success=Було додано SSH сертификат користувача '%s'. delete_key=Видалити ssh_key_deletion=Видалити SSH ключ gpg_key_deletion=Видалити GPG ключ +ssh_principal_deletion=Видалити SSH сертифікат користувача ssh_key_deletion_desc=Видалення ключа SSH скасовує доступ до вашого облікового запису. Продовжити? gpg_key_deletion_desc=Видалення GPG ключа скасовує перевірку підписаних ним комітів. Продовжити? +ssh_principal_deletion_desc=Видалення ключа SSH скасовує доступ до вашого облікового запису. Продовжити? ssh_key_deletion_success=SSH ключ був видалений. gpg_key_deletion_success=GPG було видалено. +ssh_principal_deletion_success=Користувача видалено. add_on=Додано valid_until=Дійсний до valid_forever=Дійсний завжди @@ -526,9 +617,11 @@ can_read_info=Читати can_write_info=Написати key_state_desc=Цей ключ використовувався в останні 7 днів token_state_desc=Цей токен використовувався в останні 7 днів +principal_state_desc=Участник був на сайті в останні 7 днів show_openid=Показати у профілю hide_openid=Не показувати у профілі ssh_disabled=SSH вимкнено +ssh_externally_managed=Цей ключ SSH має зовнішнє управління для цього користувача manage_social=Керувати зв'язаними обліковими записами соціальних мереж social_desc=Ці адреси соціальних мереж пов'язані з вашим обліковим записом Gitea. Переконайтеся, що ви їх впізнаєте, оскільки вони можуть бути використані для входу в обліковий запис Gitea. unbind=Від'єднати @@ -595,6 +688,7 @@ or_enter_secret=Або введіть секрет: %s then_enter_passcode=І введіть пароль, який відображається в додатку: passcode_invalid=Некоректний пароль. Спробуй ще раз. twofa_enrolled=Для вашого облікового запису було включена двофакторна автентифікація. Зберігайте свій scratch-токен (%s) у безпечному місці, оскільки він показується лише один раз! +twofa_failed_get_secret=Не вдалося отримати секрет. u2f_desc=Ключами безпеки є апаратні пристрої що містять криптографічні ключі. Вони можуть бути використані для двофакторної автентифікації. Ключ безпеки повинен підтримувати стандарт FIDO U2F. u2f_require_twofa=Для використання ключів безпеки ваш обліковий запис має використовувати двофакторну автентифікацію. @@ -616,6 +710,7 @@ repos_none=У вас немає власних репозиторіїв delete_account=Видалити ваш обліковий запис delete_prompt=Ця операція остаточно видалить обліковий запис користувача. Це НЕ МОЖЛИВО відмінити. +delete_with_all_comments=Ваш обліковий запис молодший за %s днів. Щоб уникнути коментарів-привидів, всі запити/PR коментрарі будуть видалені з ним. 
confirm_delete_account=Підтвердження видалення delete_account_title=Видалити цей обліковий запис delete_account_desc=Ви впевнені, що хочете остаточно видалити цей обліковий запис? @@ -625,8 +720,18 @@ email_notifications.onmention=Повідомлення email тільки кол email_notifications.disable=Вимкнути email сповіщення email_notifications.submit=Налаштувати параметри email +visibility=Видимість користувача +visibility.public=Публічний +visibility.public_tooltip=Видимий для всіх користувачів +visibility.limited=Обмежений +visibility.limited_tooltip=Видимий лише для користувачів, що ввійшли в систему +visibility.private=Приватний +visibility.private_tooltip=Видимий лише членам організації + [repo] +new_repo_helper=Репозиторій містить усі файли проекту, включаючи історію ревізій. Ще десь є? Мігрувати репозиторій. owner=Власник +owner_helper=Деякі організації можуть не відображатися у випадаючому списку через максимальну кількість репозиторііїв. repo_name=Назва репозиторію repo_name_helper=Хороші назви репозиторіїв використовують короткі, унікальні ключові слова що легко запам'ятати. repo_size=Розмір репозиторію @@ -647,26 +752,43 @@ use_template=Застосувати цей шаблон generate_repo=Згенерувати репозиторій generate_from=Генерувати з repo_desc=Опис +repo_desc_helper=Введіть короткий опис (опціонально) repo_lang=Мова repo_gitignore_helper=Виберіть шаблон .gitignore. +repo_gitignore_helper_desc=Оберіть з списку мовних шаблонів файли, які не будуть відстежуватись. Типові артефакти, які генеруються за допомогою інструментів побудови кожної мови, за замовчуванням включені до .gitignor. issue_labels=Мітки проблем issue_labels_helper=Вибрати мітку для проблеми. license=Ліцензія license_helper=Виберіть ліцензійний файл. +license_helper_desc=Ліцензія регулює те, що інші можуть і не можуть робити з вашим кодом. Не впевнені, що саме підходить для вашого проекту? Дивіться Виберіть ліцензію. readme=README readme_helper=Виберіть шаблон README. +readme_helper_desc=Це місце, де ви можете написати повний опис вашого проекту. auto_init=Ініціалізувати репозиторій (Додає .gitignore, LICENSE та README) +trust_model_helper=Виберіть модель довіри для підтвердження підпису. Можливі варіанти: +trust_model_helper_collaborator=Співавтор: підписи довіри від співавторів +trust_model_helper_committer=Учасник: довірені підписи участників +trust_model_helper_collaborator_committer=Співавтор+Комітер: довірчі підписи від співавторів, які відповідають комітеру +trust_model_helper_default=За замовчуванням: використовувати стандартну модель довіри для цієї установки create_repo=Створити репозиторій default_branch=Головна гілка +default_branch_helper=Гілка за замовчуванням є базовою гілкою для запитів на злиття та комітів коду. mirror_prune=Очистити mirror_prune_desc=Видалення застарілих посилань які ви відслідковуєте mirror_interval=Інтервал дзеркалювання (допустимі значення 'h', 'm', 's'). 0 - щоб вимкнути автоматичну синхронізацію. mirror_interval_invalid=Інтервал дзеркалювання є неприпустимим. mirror_address=Клонування з URL-адреси -mirror_address_desc=Покласти будь-які необхідні облікові дані у розділі клонування авторизації. +mirror_address_desc=Помістіть будь-які необхідні облікові дані у розділі Авторизація. mirror_address_url_invalid=Надана URL-адреса є неприпустимою. Ви повинні екранувати всі компоненти URL-адреси правильно. mirror_address_protocol_invalid=Надана URL-адреса є неприпустимою. Тільки http(s):// або git:// можливо використовувати при дзеркальні. 
+mirror_lfs=Склад великих файлів (LFS) +mirror_lfs_desc=Активувати дзеркальне відображення даних LFS. +mirror_lfs_endpoint=Кінцева точка LFS +mirror_lfs_endpoint_desc=Синхронізація спробує використовувати url для клону щоб визначити LFS-сервер. Ви також можете вказати кінцеву точку користувача, якщо дані репозиторію LFS зберігаються в іншому місці. mirror_last_synced=Остання синхронізація +mirror_password_placeholder=(без змін) +mirror_password_blank_placeholder=(відключено) +mirror_password_help=Змініть ім'я користувача, щоб видалити збережений пароль. watchers=Спостерігачі stargazers=Зацікавлені forks=Форки @@ -674,7 +796,22 @@ pick_reaction=Залиште свою оцінку reactions_more=додати %d більше unit_disabled=Адміністратор сайту вимкнув цей розділ репозиторію. language_other=Інші - +adopt_search=Введіть ім'я користувача для пошуку неприйнятних репозиторіїв... (залиште порожнім, щоб знайти всі) +adopt_preexisting_label=Прийняті файли +adopt_preexisting=Прийняти вже існуючі файли +adopt_preexisting_content=Створити репозиторій з %s +adopt_preexisting_success=Прийняти файли та створити репозиторій з %s +delete_preexisting_label=Видалити +delete_preexisting=Видалити існуючі файли +delete_preexisting_content=Видалити файли з %s +delete_preexisting_success=Видалено неприйняті файли в %s + +transfer.accept=Дозволити трансфер +transfer.accept_desc=Перемістити до "%s" +transfer.reject=Відхилити трансфер +transfer.reject_desc=Скасувати переміщення до "%s" +transfer.no_permission_to_accept=У вас немає дозволу на прийняття +transfer.no_permission_to_reject=У вас немає дозволу для відхилення desc.private=Приватний desc.public=Публічний @@ -699,34 +836,66 @@ archive.title=Це архівний репозитарій. Ви можете п archive.issue.nocomment=Це архівний репозитарій. Ви не можете коментувати запити. archive.pull.nocomment=Це архівний репозитарій. Ви не можете коментувати пулл-реквести. +form.reach_limit_of_creation_1=Ви вже досягли ліміту в %d репозиторіїв. +form.reach_limit_of_creation_n=Ви досягли максимальної кількості %d створених репозиторіїв. form.name_reserved=Назву репозиторію '%s' зарезервовано. form.name_pattern_not_allowed=Шаблон '%s' не дозволено в назві репозиторія. -need_auth=Клонувати з авторизацією +need_auth=Авторизація +migrate_options=Параметри міграції +migrate_service=Сервіс міграції +migrate_options_mirror_helper=Цей репозиторій буде дзеркалом +migrate_options_mirror_disabled=Адміністратор вашого сайту вимкнув створення нових дзеркал. +migrate_options_lfs=Перенесення LFS файлів +migrate_options_lfs_endpoint.label=Кінцева точка LFS +migrate_options_lfs_endpoint.description=Міграція буде намагатися використовувати ваш Git віддалено, щоб визначати LFS сервер. Ви також можете вказати свою кінцеву точку, якщо дані репозиторію LFS зберігаються в іншому місці. +migrate_options_lfs_endpoint.description.local=Також підтримуються шляхи на локальному сервері. 
+migrate_options_lfs_endpoint.placeholder=Залиште порожнім, щоб отримати з клонуванного URL migrate_items=Деталі міграції migrate_items_wiki=Вікі migrate_items_milestones=Етапи migrate_items_labels=Мітки migrate_items_issues=Проблеми migrate_items_pullrequests=Запити на злиття +migrate_items_merge_requests=Запити на злиття migrate_items_releases=Релізи migrate_repo=Перенести репозиторій migrate.clone_address=Міграція / клонувати з URL-адреси migrate.clone_address_desc=URL-адреса HTTP(S) або Git "clone" існуючого репозиторія migrate.clone_local_path=або шлях до локального серверу migrate.permission_denied=Вам не дозволено імпортувати локальні репозиторії. +migrate.permission_denied_blocked=Вам не дозволено імпортувати з заблокованих хостів. +migrate.permission_denied_private_ip=Вам не дозволено імпортувати з приватних IP-адрес. migrate.invalid_local_path=Локальний шлях недійсний. Він не існує або не є каталогом. +migrate.invalid_lfs_endpoint=Помилкова кінцева точка LFS. migrate.failed=Міграція не вдалася: %v +migrate.migrate_items_options=Для перенесення додаткових елементів потрібен токен доступу migrated_from=Перенесено з %[2]s migrated_from_fake=Перенесено з %[1]s +migrate.migrate=Міграція з %s migrate.migrating=Міграція із %s... migrate.migrating_failed=Міграція із %s не вдалася. +migrate.migrating_failed.error=Помилка: %s +migrate.github.description=Міграція даних з Github.com або Github Enterprise. +migrate.git.description=Міграція або відзеркалення git даних з сервісів Git +migrate.gitlab.description=Міграція даних з сервера GitLab.com або окремого сервера gitlab. +migrate.gitea.description=Міграція даних з Gitea.com або окремого сервера Gitea. +migrate.gogs.description=Міграція даних з notabug.org або іншого Gogs сервера. +migrate.migrating_git=Міграція Git даних +migrate.migrating_topics=Міграція тем +migrate.migrating_milestones=Міграція етапів +migrate.migrating_labels=Міграція міток +migrate.migrating_releases=Міграція релізів +migrate.migrating_issues=Міграція проблем +migrate.migrating_pulls=Міграція запитів на злиття mirror_from=дзеркало forked_from=форк від generated_from=згенеровано з fork_from_self=Ви не можете форкнути репозиторій, так як ви його власник. fork_guest_user=Увійдіть, щоб зробити форк репозитарію. +watch_guest_user=Увійдіть, щоб слідкувати за цим репозиторієм. +star_guest_user=Увійдіть, щоб додати в обране цей репозиторій. copy_link=Копіювати copy_link_success=Посилання було скопійоване copy_link_error=Натисніть ⌘-C або Ctrl-C, щоб скопіювати @@ -749,7 +918,9 @@ code=Код code.desc=Доступ до коду, файлів, комітів та гілок. branch=Гілка tree=Дерево +clear_ref=`Очистити поточне посилання` filter_branch_and_tag=Фільтрувати гілку або тег +find_tag=Знайти тег branches=Гілки tags=Теги issues=Проблеми @@ -762,9 +933,14 @@ org_labels_desc_manage=керувати milestones=Етап commits=Коміти commit=Коміт +release=Реліз releases=Релізи +tag=Тег +released_this=випущені релізи file_raw=Неформатований file_history=Історія +file_view_source=Переглянути вихідний код +file_view_rendered=Переглянути відрендерено file_view_raw=Перегляд Raw file_permalink=Постійне посилання file_too_large=Цей файл завеликий щоб бути показаним. 
@@ -773,6 +949,8 @@ audio_not_supported_in_browser=Ваш браузер не підтримує т stored_lfs=Збережено з Git LFS symbolic_link=Символічне посилання commit_graph=Графік комітів +commit_graph.select=Виберіть гілки +commit_graph.hide_pr_refs=Приховати запити на злиття commit_graph.monochrome=Монохром commit_graph.color=Колір blame=Звинувачення @@ -804,6 +982,7 @@ editor.add=Додати '%s' editor.update=Оновити '%s' editor.delete=Видалити '%s' editor.commit_message_desc=Додати необов'язковий розширений опис… +editor.signoff_desc=Додатиь Signed-off-by комітом в конці повідомлення журналу комітів. editor.commit_directly_to_this_branch=Зробіть коміт прямо в гілку %s. editor.create_new_branch=Створити нову гілку для цього коміту та відкрити запит на злиття. editor.create_new_branch_np=Створити нову гілку для цього коміту. @@ -822,8 +1001,13 @@ editor.file_deleting_no_longer_exists=Видалений файл '%s' біль editor.file_changed_while_editing=Зміст файлу змінився з моменту початку редагування. Натисніть тут , щоб переглянути що було змінено, або закомітьте зміни ще раз, щоб переписати їх. editor.file_already_exists=Файл з назвою "%s" уже існує у цьому репозиторію. editor.commit_empty_file_header=Закомітити порожній файл +editor.commit_empty_file_text=Файл, в комміті порожній. Продовжити? editor.no_changes_to_show=Нема змін для показу. +editor.fail_to_update_file=Не вдалося оновити/створити файл '%s'. +editor.fail_to_update_file_summary=Помилка: editor.push_rejected_no_message=Зміна була відхилена сервером без повідомлення. Будь ласка, перевірте git-хуки. +editor.push_rejected=Зміни було відхилено сервером. Будь ласка, перевірте githook'и. +editor.push_rejected_summary=Повне повідомлення про відмову: editor.add_subdir=Додати каталог… editor.unable_to_upload_files=Не вдалося завантажити файли до '%s' через помилку: %v editor.upload_file_is_locked=Файл '%s' заблоковано %s. @@ -854,6 +1038,9 @@ ext_issues=Зов. Проблеми ext_issues.desc=Посилання на зовнішню систему відстеження проблем. projects=Проєкти +projects.desc=Керуйте проблемами та запитами злиття на дошках проєкту. +projects.description=Опис (необов'язково) +projects.description_placeholder=Опис projects.create=Створити проєкт projects.title=Назва projects.new=Новий проєкт @@ -877,6 +1064,8 @@ projects.board.edit_title=Нова назва дошки projects.board.new_title=Назва нової дошки projects.board.new_submit=Створити projects.board.new=Нова дошка +projects.board.set_default=Встановити за замовчуванням +projects.board.set_default_desc=Встановити цю дошку за замовчуванням для проблем без категорії та витягувань projects.board.delete=Видалити дошку projects.board.deletion_desc=Видалення дошки проєкту перенесе всі пов'язані проблеми в дошку 'Без категорії'. Продовжити? projects.open=Відкрити @@ -913,6 +1102,9 @@ issues.new.clear_assignees=Прибрати виконавеців issues.new.no_assignees=Немає виконавеця issues.new.no_reviewers=Немає рецензентів issues.new.add_reviewer_title=Попросити рецензію +issues.choose.get_started=Початок роботи +issues.choose.blank=Типово +issues.choose.blank_about=Створити задачу із шаблону за замовчуванням. issues.no_ref=Не вказана гілка або тег issues.create=Створити проблему issues.new_label=Нова мітка @@ -924,6 +1116,11 @@ issues.label_templates.info=Ще немає міток. 
Натисніть 'Но issues.label_templates.helper=Оберіть набір міток issues.label_templates.use=Використовувати набір міток issues.label_templates.fail_to_load_file=Не вдалося завантажити файл шаблона мітки '%s': %v +issues.add_label=додано %s з міткою %s +issues.add_labels=додано %s з мітками %s +issues.remove_label=видалено %s з міткою %s +issues.remove_labels=видалено %s з мітками %s +issues.add_remove_labels=додано %s і видалено %s мітками %s issues.add_milestone_at=`додав(ла) до %s етапу %s` issues.add_project_at=`додав до проєкту %s %s` issues.change_milestone_at=`змінено цільової етап з %s на %s %s` @@ -952,6 +1149,7 @@ issues.filter_type.all_issues=Всі проблеми issues.filter_type.assigned_to_you=Призначене вам issues.filter_type.created_by_you=Створено вами issues.filter_type.mentioning_you=Вас згадано +issues.filter_type.review_requested=Відгук запитано issues.filter_sort=Сортувати issues.filter_sort.latest=Найновіші issues.filter_sort.oldest=Найстаріші @@ -973,7 +1171,10 @@ issues.action_milestone_no_select=Етап відсутній issues.action_assignee=Виконавець issues.action_assignee_no_select=Немає виконавеця issues.opened_by=%[1]s відкрито %[3]s +pulls.merged_by=до %[3] злито %[1]s +pulls.merged_by_fake=%[2]s об'єднаний %[1]s issues.closed_by=закрито %[3]s %[1]s +issues.opened_by_fake=%[2]s відкрив(ла) %[1]s issues.closed_by_fake=закрито %[2]s %[1]s issues.previous=Попередній issues.next=Далі @@ -984,10 +1185,13 @@ issues.commented_at=`прокоментував(ла) %s` issues.delete_comment_confirm=Ви впевнені, що хочете видалити цей коментар? issues.context.copy_link=Скопіювати посилання issues.context.quote_reply=Цитувати відповідь +issues.context.reference_issue=Посилання в новій проблемі issues.context.edit=Редагувати issues.context.delete=Видалити issues.no_content=Тут ще немає жодного змісту. issues.close_issue=Закрити +issues.pull_merged_at=`Злиті коміти %[2]s в %[3]s %[4]s` +issues.manually_pull_merged_at=`%[4]s вручну злив коміти %[2]s в %[3]s` issues.close_comment_issue=Прокоментувати і закрити issues.reopen_issue=Відкрити знову issues.reopen_comment_issue=Прокоментувати та відкрити знову @@ -1006,8 +1210,11 @@ issues.poster=Автор issues.collaborator=Співавтор issues.owner=Власник issues.re_request_review=Повторно попросити рецензію +issues.is_stale=З часу останньої перевірки в цей PR було внесено деякі зміни issues.remove_request_review=Видалити запит рецензування issues.remove_request_review_block=Неможливо видалити запит рецензування +issues.dismiss_review=Відхилити рецензiю +issues.dismiss_review_warning=Ви впевнені, що хочете відхилити цей відгук? issues.sign_in_require_desc=Підпишіться щоб приєднатися до обговорення. issues.edit=Редагувати issues.cancel=Відмінити @@ -1052,12 +1259,17 @@ issues.lock.title=Заблокувати обговорення цієї про issues.unlock.title=Розблокувати обговорення цієї проблеми. issues.comment_on_locked=Ви не можете коментувати заблоковану проблему. 
issues.tracker=Відстеження часу +issues.start_tracking_short=Запустити таймер issues.start_tracking=Почати відстеження часу issues.start_tracking_history=`почав працювати %s` issues.tracker_auto_close=Таймер буде автоматично зупинено, коли ця проблема буде закрита +issues.tracking_already_started=`Ви вже почали відстежувати час для іншої проблеми!` +issues.stop_tracking=Зупинити таймер issues.stop_tracking_history=`перестав(-ла) працювати %s` +issues.cancel_tracking=Скасувати issues.cancel_tracking_history=`скасував відстеження часу %s` issues.add_time=Вручну додати час +issues.del_time=Видалити цей журнал часу issues.add_time_short=Додати час issues.add_time_cancel=Відмінити issues.add_time_history=`додав(-ла) витрачений час %s` @@ -1073,6 +1285,7 @@ issues.error_modifying_due_date=Не вдалося змінити дату за issues.error_removing_due_date=Не вдалося видалити дату завершення. issues.push_commit_1=додав %d коміт %s issues.push_commits_n=додав %d коміти(-ів) %s +issues.force_push_codes=`примусово залито %[1]s з %[2] до %[4]s %[6]s` issues.due_date_form=рррр-мм-дд issues.due_date_form_add=Додати дату завершення issues.due_date_form_edit=Редагувати @@ -1115,6 +1328,8 @@ issues.review.self.approval=Ви не можете схвалити власни issues.review.self.rejection=Ви не можете надіслати запит на зміну на власний пулл-реквест. issues.review.approve=зміни затверджено %s issues.review.comment=рецензовано %s +issues.review.dismissed=відхилено відгук %s %s +issues.review.dismissed_label=Відхилено issues.review.left_comment=додав коментар issues.review.content.empty=Запрошуючи зміни, ви зобов'язані залишити коментар з поясненнями своїх побажань відносно Pull Request'а. issues.review.reject=зробив запит змін %s @@ -1125,6 +1340,7 @@ issues.review.remove_review_request_self=відмовився рецензува issues.review.pending=Очікування issues.review.review=Рецензії issues.review.reviewers=Рецензенти +issues.review.outdated=Застарілі issues.review.show_outdated=Показати застарілі issues.review.hide_outdated=Приховати застарілі issues.review.show_resolved=Показати вирішене @@ -1133,6 +1349,10 @@ issues.review.resolve_conversation=Завершити обговорення issues.review.un_resolve_conversation=Поновити обговорення issues.review.resolved_by=позначив обговорення завершеним issues.assignee.error=Додано не всіх виконавців через непередбачену помилку. +issues.reference_issue.body=Тіло + +compare.compare_base=основа +compare.compare_head=порівняти pulls.desc=Увімкнути запити на злиття та огляд коду. pulls.new=Новий запит на злиття @@ -1143,6 +1363,7 @@ pulls.compare_compare=pull з pulls.filter_branch=Фільтр по гілці pulls.no_results=Результатів не знайдено. pulls.nothing_to_compare=Ці гілки однакові. Немає необхідності створювати запитів на злиття. +pulls.nothing_to_compare_and_allow_empty_pr=Одинакові гілки. Цей PR буде порожнім. pulls.has_pull_request=`Вже існує запит на злиття між двома цілями: %[2]s#%[3]d` pulls.create=Створити запит на злиття pulls.title_desc=хоче злити %[1]d комітів з %[2]s в %[3]s @@ -1155,19 +1376,28 @@ pulls.reopen_to_merge=Будь ласка перевідкрийте цей за pulls.cant_reopen_deleted_branch=Цей запит не можна повторно відкрити, оскільки гілку видалено. pulls.merged=Злито pulls.merged_as=Запит на злиття був влитиий як %[2]s. +pulls.manually_merged=Ручне злиття +pulls.manually_merged_as=Запит на злиття був вручну злитиий як %[2]s. pulls.is_closed=Запит на злиття було закрито. pulls.has_merged=Запит на злиття було об'єднано. 
pulls.title_wip_desc=`Почніть заголовок з %s щоб запобігти випадковому злиттю запитів.` -pulls.cannot_merge_work_in_progress=Цей пулл-реквест вже в стадії виконання. Видаліть префікс %s з заголовку після того як роботи будуть завершені +pulls.cannot_merge_work_in_progress=Цей пулл-реквест позначений як прийнятий в опрацювання. +pulls.still_in_progress=Все ще в процесі? +pulls.add_prefix=Додати префікс %s +pulls.remove_prefix=Видалити префікс %s pulls.data_broken=Зміст цього запиту було порушено внаслідок видалення інформації Форком. Цей запит тягнеться через відсутність інформації про вилучення. pulls.files_conflicted=Цей запит має зміни, що конфліктують з цільовою гілкою. pulls.is_checking=Триває перевірка конфліктів, будь ласка обновіть сторінку дещо пізніше. +pulls.is_empty=Ця гілка ідентична з цільовою гілкою. pulls.required_status_check_failed=Деякі необхідні перевірки виконані з помилками. pulls.required_status_check_missing=Декілька з необхідних перевірок відсутні. pulls.required_status_check_administrator=Як адміністратор ви все одно можете об'єднати цей запит на злиття. pulls.blocked_by_approvals=Цей pull-запит ще не має достатньо схвалень. %d від %d схвалень надано. pulls.blocked_by_rejection=Цей запит на злиття має запит змін від офіційного рецензента. +pulls.blocked_by_official_review_requests=Цей pull-запит має офіційні запити на перевірку. pulls.blocked_by_outdated_branch=Цей запит на злиття заблоковано, оскільки він застарів. +pulls.blocked_by_changed_protected_files_1=Цей pull-запит заблоковано, оскільки він змінює захищений файл: +pulls.blocked_by_changed_protected_files_n=Цей pull-запит заблоковано, оскільки він змінює захищені файли: pulls.can_auto_merge_desc=Цей запит можна об'єднати автоматично. pulls.cannot_auto_merge_desc=Цей запит на злиття не може бути злитий автоматично через конфлікти. pulls.cannot_auto_merge_helper=Злийте вручну для вирішення конфліктів. @@ -1179,6 +1409,7 @@ pulls.reject_count_1=%d запит на зміну pulls.reject_count_n=%d запити на зміну pulls.waiting_count_1=очікується %d рецензія pulls.waiting_count_n=очікується %d рецензії(й) +pulls.wrong_commit_id=id коміту повинен бути id коміту в цільовій гілці pulls.no_merge_desc=Цей запити на злиття неможливо злити, оскільки всі параметри об'єднання репозиторія вимкнено. pulls.no_merge_helper=Увімкніть параметри злиття в налаштуваннях репозиторія або злийте запити на злиття вручну. @@ -1189,11 +1420,19 @@ pulls.merge_pull_request=Об'єднати запит на злиття pulls.rebase_merge_pull_request=Зробити Rebase і злити pulls.rebase_merge_commit_pull_request=Rebase та злитя (--no-ff) pulls.squash_merge_pull_request=Об'єднати (Squash) і злити +pulls.merge_manually=Об’єднано вручну +pulls.merge_commit_id=ID коміту злиття pulls.require_signed_wont_sign=Гілка вимагає підписаних комітів, але це злиття не буде підписано pulls.invalid_merge_option=Цей параметр злиття не можна використовувати для цього Pull Request'а. +pulls.merge_conflict=Злиття не вдалося: Був конфлікт при злиття. Підказка: спробуйте іншу стратегію +pulls.merge_conflict_summary=Помилка +pulls.rebase_conflict=Злиття не вдалося: відбувся конфлікт під час злиття: %[1]s. Підказка: спробуйте іншу стратегію +pulls.rebase_conflict_summary=Помилка ; %[2]s
    %[3]s
pulls.unrelated_histories=Помилка злиття: head та base злиття не мають спільної історії. Підказка: спробуйте іншу стратегію pulls.merge_out_of_date=Помилка злиття: base було оновлено, поки відбувалося злиття. Підказка: спробуйте знову. +pulls.push_rejected=Не вдалося виконати злиття: push було відхилено. Перегляньте githooks для цього репозиторію. +pulls.push_rejected_summary=Повне повідомлення про відмову pulls.push_rejected_no_message=Не вдалося виконати злиття: push було відхилено без повідомлення.
    Перегляньте git-хуки для цього репозиторію pulls.open_unmerged_pull_exists=`Ви не можете знову відкрити, оскільки вже існує запит на злиття (%d) з того ж репозиторія з тією ж інформацією про злиття і в очікуванні.` pulls.status_checking=Деякі перевірки знаходяться на розгляді @@ -1201,18 +1440,24 @@ pulls.status_checks_success=Всі перевірки були успішним pulls.status_checks_warning=Декілька перевірок завершилися з попередженнями pulls.status_checks_failure=Декілька перевірок не були успішними pulls.status_checks_error=Декілька перевірок завершилися з помилками +pulls.status_checks_requested=Обов'язково +pulls.status_checks_details=Подробиці pulls.update_branch=Оновити гілку pulls.update_branch_success=Оновлення гілки пройшло успішно pulls.update_not_allowed=Ви не можете оновити гілку pulls.outdated_with_base_branch=Ця гілка застаріла відносно базової гілки pulls.closed_at=`закрив цей запит на злиття %[2]s` pulls.reopened_at=`повторно відкрив цей запит на злиття %[2]s` +pulls.merge_instruction_hint=`Також можна переглянути інструкції для командного рядка.` +pulls.merge_instruction_step1_desc=У репозиторії вашого проекту перевірте нову гілку і протестуйте зміни. +pulls.merge_instruction_step2_desc=Об'єднати зміни і оновити на Gitea. milestones.new=Новий етап milestones.open_tab=%d відкрито milestones.close_tab=%d закрито milestones.closed=Закрито %s +milestones.update_ago=Оновлено %s назад milestones.no_due_date=Немає дати завершення milestones.open=Відкрити milestones.close=Закрити @@ -1252,6 +1497,7 @@ signing.wont_sign.basesigned=Злиття не буде підписано, ос signing.wont_sign.headsigned=Злиття не буде підписано, оскільки головний коміт не підписано signing.wont_sign.commitssigned=Злиття не буде підписано, оскільки всі пов'язані коміти не підписані signing.wont_sign.approved=Злиття не буде підписано, оскільки PR не затверджено +signing.wont_sign.not_signed_in=Ви не ввійшли ext_wiki=Зов. Вікі ext_wiki.desc=Посилання на зовнішню вікі. @@ -1309,6 +1555,7 @@ activity.closed_issues_count_1=Закрита проблема activity.closed_issues_count_n=Закриті проблеми activity.title.issues_1=%d Проблема activity.title.issues_n=%d Проблеми +activity.title.issues_closed_from=%s закрито %s activity.title.issues_created_by=%s створена(і) %s activity.closed_issue_label=Закрито activity.new_issues_count_1=Нова Проблема @@ -1346,6 +1593,8 @@ activity.git_stats_deletion_n=%d видалені search=Пошук search.search_repo=Пошук репозиторію +search.fuzzy=Неточний +search.match=Збігається search.results=Результати пошуку для "%s" в %s settings=Налаштування @@ -1361,6 +1610,15 @@ settings.hooks=Веб-хуки settings.githooks=Git хуки settings.basic_settings=Базові налаштування settings.mirror_settings=Налаштування дзеркала +settings.mirror_settings.docs=Налаштуйте свій проект, щоб автоматично відправляти/отримувати зміни з іншого репозиторію. Гілки, теги і коміти будуть синхронізуватися автоматично. Як я можу відзеркалити репозиторії? +settings.mirror_settings.mirrored_repository=Віддзеркалений репозиторій +settings.mirror_settings.direction=Напрямок +settings.mirror_settings.direction.pull=Pull +settings.mirror_settings.direction.push=Push +settings.mirror_settings.last_update=Останнє оновлення +settings.mirror_settings.push_mirror.none=Не налаштовано дзеркало push +settings.mirror_settings.push_mirror.remote_url=URL віддаленого репозитарія git +settings.mirror_settings.push_mirror.add=Додати Push дзеркало settings.sync_mirror=Синхронізувати зараз settings.mirror_sync_in_progress=Синхронізуються репозиторії-дзеркала. 
Зачекайте хвилину і обновіть сторінку.
settings.email_notifications.enable=Увімкнути сповіщення email
@@ -1369,6 +1627,7 @@ settings.email_notifications.disable=Вимкнути email сповіщення
settings.email_notifications.submit=Налаштувати параметри email
settings.site=Веб-сайт
settings.update_settings=Оновити налаштування
+settings.branches.update_default_branch=Оновити гілку за замовчуванням
settings.advanced_settings=Додаткові налаштування
settings.wiki_desc=Увімкнути репозиторії Вікі
settings.use_internal_wiki=Використовувати вбудовані Вікі
@@ -1396,6 +1655,8 @@ settings.pulls.allow_merge_commits=Дозволити коміти злиття
settings.pulls.allow_rebase_merge=Увімкнути Rebasing коміти перед злиттям
settings.pulls.allow_rebase_merge_commit=Ввімкнути Rebase з явним злиттям (--no-ff)
settings.pulls.allow_squash_commits=Увімкнути об'єднувати коміти перед злиттям
+settings.pulls.allow_manual_merge=Позначити PR як об'єднаний вручну
+settings.pulls.enable_autodetect_manual_merge=Увімкнути автовизначення ручного злиття (Примітка: у деяких особливих випадках можуть виникнути помилки)
settings.projects_desc=Увімкнути проєкти у репозиторії
settings.admin_settings=Налаштування адміністратора
settings.admin_enable_health_check=Включити перевірки працездатності репозиторію (git fsck)
@@ -1413,12 +1674,34 @@ settings.convert_fork_notices_1=Ця операція перетворить ф
settings.convert_fork_confirm=Перетворити репозиторій
settings.convert_fork_succeed=Цей форк успішно перетворено на звичайний репозиторій.
settings.transfer=Передати новому власнику
+settings.transfer.rejected=Перенесення репозиторію відхилено.
+settings.transfer.success=Перенесення репозиторію виконано.
+settings.transfer_abort=Скасувати перенесення
+settings.transfer_abort_invalid=Ви не можете скасувати неіснуюче перенесення репозиторію.
+settings.transfer_abort_success=Перенесення репозиторію до %s було успішно скасовано.
settings.transfer_desc=Передати репозиторій користувачеві або організації, де ви маєте права адміністратора.
settings.transfer_form_title=Введіть ім'я репозиторія як підтвердження:
+settings.transfer_in_progress=В даний час відбувається перенесення. Будь ласка, скасуйте його, якщо ви бажаєте перенести цей репозиторій іншому користувачу.
settings.transfer_notices_1=- Ви втратите доступ до репозиторія, якщо ви переведете його окремому користувачеві.
settings.transfer_notices_2=- Ви збережете доступ, якщо новим власником стане організація, власником якої ви є.
+settings.transfer_notices_3=- Якщо репозиторій є приватним і передається окремому користувачеві, ця дія гарантує, що користувач має хоча б дозвіл на читання репозиторію (і за потреби змінює права доступу).
settings.transfer_owner=Новий власник
+settings.transfer_perform=Здійснити перенесення
+settings.transfer_started=Цей репозиторій чекає підтвердження перенесення від "%s"
settings.transfer_succeed=Репозиторій був перенесений.
+settings.signing_settings=Параметри перевірки підпису
+settings.trust_model=Модель довіри для підпису
+settings.trust_model.default=Модель довіри за замовчуванням
+settings.trust_model.default.desc=Використовувати модель довіри репозиторію за замовчуванням для цього сайту.
+settings.trust_model.collaborator=Співавтор
+settings.trust_model.collaborator.long=Співавтор: довіряти підписам співавторів
+settings.trust_model.collaborator.desc=Допустимі підписи співавторів цього репозиторію будуть позначені як "довірені" - (незалежно від того, чи відповідають вони комітеру). В іншому випадку дійсні підписи будуть позначені як «ненадійні», якщо підпис співпадає з комітером, і «невідповідні», якщо ні.
+settings.trust_model.committer=Комітер
+settings.trust_model.committer.long=Комітер: Довіряти підписам, які відповідають комітерам (як на GitHub; це змусить коміти, підписані Gitea, мати Gitea як комітера)
+settings.trust_model.committer.desc=Дозволені підписи будуть позначені лише "довіреними", якщо вони співпадають з комітером, інакше вони будуть позначені "невідповідними". Це змусить Gitea бути комітером підписаних комітів, а фактичні комітери будуть зазначені в Co-authored-by: та Co-committed-by:, що будуть вставлені в коміт. Типовий ключ Gitea повинен відповідати користувачу в базі даних.
+settings.trust_model.collaboratorcommitter=Співавтор+Комітер
+settings.trust_model.collaboratorcommitter.long=Співавтор+Комітер: Довіряти підписам від співавторів, які відповідають комітеру
+settings.trust_model.collaboratorcommitter.desc=Допустимі підписи співавторів цього репозиторію будуть позначатися як "довірені", якщо вони відповідають комітеру. В іншому випадку дійсні підписи будуть позначені як «ненадійні», якщо підпис співпадає з комітером, і як «невідповідні» в іншому випадку. Це змусить Gitea бути зазначеною як комітер, а фактичного комітера буде вказано в Co-Authored-By: та Co-Committed-By:, доданих до коміту. Типовий ключ Gitea повинен відповідати користувачу в базі даних.
settings.wiki_delete=Видалити вікі-дані
settings.wiki_delete_desc=Будьте уважні! Як тільки ви видалите Вікі - шляху назад не буде.
settings.wiki_delete_notices_1=- Це назавжди знищить і відключить wiki для %s.
@@ -1525,7 +1808,7 @@ settings.event_pull_request_review_desc=Коментар запиту до зл
settings.event_pull_request_sync=Запит на злиття синхронізується
settings.event_pull_request_sync_desc=Запит до злиття синхронізовано.
settings.branch_filter=Фільтр гілок
-settings.branch_filter_desc=Білий список повідомлень push гілок, створення гілок та видалення гілок, визначається як glob шаблон. Якщо пустий або *, повідомлення для вісіх гілок ввімкнено. Дівіться github.com/gobwas/glob документацію на синтаксис. Приклад: master, {master,release*}.
+settings.branch_filter_desc=Білий список повідомлень для push, створення гілок та видалення гілок, визначається як glob шаблон. Якщо він пустий або містить *, повідомлення для всіх гілок ввімкнені. Дивіться документацію github.com/gobwas/glob щодо синтаксису. Наприклад: master, {master,release*}.
settings.active=Активний
settings.active_helper=Інформацію про викликані події буде надіслано за цією веб-хук URL-адресою.
settings.add_hook_success=Веб-хук було додано.
@@ -1595,7 +1878,7 @@ settings.dismiss_stale_approvals_desc=Коли нові коміти що змі
settings.require_signed_commits=Потрібно підписані коміти
settings.require_signed_commits_desc=Відхиляти push до цієї гілки, якщо вони не підписані або підпис неможливо перевірити.
settings.protect_protected_file_patterns=Шаблони захищених файлів (розділені крапками з комою '\;'):
-settings.protect_protected_file_patterns_desc=Захищені файли, які заборонено змінювати напряму, навіть якщо користувач має дозвіл додавати, редагувати чи видаляти файли у цій гілці. Декілька шаблонів можуть бути розділеними за допомогою крапки з комою ('\;'). github.com/gobwas/glob надає документацію щодо синтаксису шаблонів. Приклади: .drone.yml, /docs/**/*.txt.
+settings.protect_protected_file_patterns_desc=Захищені файли, які не дозволено змінювати, навіть якщо користувач має права на додавання, редагування або видалення файлів у цій гілці. Кілька шаблонів можна розділяти за допомогою крапки з комою ('\;'). Дивіться github.com/gobwas/glob документацію для синтаксису шаблонів. Наприклад: .drone.yml, /docs/**/*.txt.
settings.add_protected_branch=Увімкнути захист
settings.delete_protected_branch=Вимкнути захист
settings.update_protect_branch_success=Налаштування захисту гілки '%s' були успішно змінені.
@@ -1604,13 +1887,26 @@ settings.protected_branch_deletion=Відключити захист гілки
settings.protected_branch_deletion_desc=Будь-який користувач з дозволами на запис зможе виконувати push в цю гілку. Ви впевнені?
settings.block_rejected_reviews=Блокувати злиття при відкидаючих рецензіях
settings.block_rejected_reviews_desc=Злиття буде недоступним, якщо є запит змін від офіційних рецензентів, навіть за наявності достатньої кількості схвалень.
+settings.block_on_official_review_requests=Блокувати злиття при запиті на офіційний розгляд
+settings.block_on_official_review_requests_desc=Злиття буде неможливим, якщо є офіційні запити на розгляд, навіть за достатньої кількості схвалень.
settings.block_outdated_branch=Блокувати злиття, якщо запит на злиття застарів
settings.block_outdated_branch_desc=Злиття буде неможливим, коли головна гілка позаду основної.
settings.default_branch_desc=Головна гілка є 'базовою' для вашого репозиторія, на яку за замовчуванням спрямовані всі запити на злиття і яка є обличчям вашого репозиторія. Перше, що побачить відвідувач - це зміст головної гілки. Виберіть її з уже існуючих:
+settings.default_merge_style_desc=Стиль злиття за замовчуванням:
settings.choose_branch=Оберіть гілку…
settings.no_protected_branch=Немає захищених гілок.
settings.edit_protected_branch=Редагувати
settings.protected_branch_required_approvals_min=Число необхідних схвалень не може бути від'ємним.
+settings.tags=Мітки
+settings.tags.protection=Захист мітки
+settings.tags.protection.pattern=Шаблон тега
+settings.tags.protection.allowed=Дозволено
+settings.tags.protection.allowed.users=Дозволені користувачі
+settings.tags.protection.allowed.teams=Дозволені команди
+settings.tags.protection.allowed.noone=Ніхто
+settings.tags.protection.create=Захистити мітку
+settings.tags.protection.none=Немає захищених міток.
+settings.tags.protection.pattern.description=Ви можете використовувати одне ім'я, glob-шаблон або регулярний вираз, щоб охопити декілька тегів. Детальніше в посібнику із захищених тегів.
settings.bot_token=Токен для бота
settings.chat_id=Чат ID
settings.matrix.homeserver_url=URL домашньої сторінки
@@ -1624,6 +1920,7 @@ settings.archive.success=Репозиторію успішно присвоєн
settings.archive.error=Сталася помилка при спробі архівувати репозиторій. Докладнішу інформацію див. у журналі.
settings.archive.error_ismirror=Неможливо архівувати дзеркальний репозиротрій.
settings.archive.branchsettings_unavailable=Параметри гілки не доступні, якщо репозиторій архівний.
+settings.archive.tagsettings_unavailable=Параметри міток недоступні, якщо репозиторій архівний.
settings.unarchive.button=Зняти архівний статус
settings.unarchive.header=Зняти архівний статус для репозиторія
settings.unarchive.text=Зняття статусу архівного відновить запис в репозиторій, а також відкриє можливість створювати запити з нових проблем та пулл-запити.
@@ -1673,7 +1970,9 @@ diff.whitespace_ignore_all_whitespace=Ігнорувати пробіли, по diff.whitespace_ignore_amount_changes=Ігнорувати зміни у кількості пробілів diff.whitespace_ignore_at_eol=Ігнорувати зміни у пробілах в кінці рядка diff.stats_desc= %d змінених файлів з %d додано та %d видалено +diff.stats_desc_file=%d змін: %d доповнень та %d видалень diff.bin=BIN +diff.bin_not_shown=Бінарний файл не відображається. diff.view_file=Переглянути файл diff.file_before=Перед diff.file_after=Після @@ -1681,6 +1980,7 @@ diff.file_image_width=Ширина diff.file_image_height=Висота diff.file_byte_size=Розмір diff.file_suppressed=Різницю між файлами не показано, бо вона завелика +diff.file_suppressed_line_too_long=Різницю між файлами не показано, оскільки один чи декілька рядків занадто довгі diff.too_many_files=Деякі файли не було показано, через те що забагато файлів було змінено diff.comment.placeholder=Залишити коментар diff.comment.markdown_info=Стилізація з markdown підтримується. @@ -1695,13 +1995,20 @@ diff.review.comment=Коментар diff.review.approve=Затвердити diff.review.reject=Запит змін diff.committed_by=зафіксовано +diff.protected=Захищений +diff.image.side_by_side=Пліч-о-пліч +diff.image.swipe=Свайп +diff.image.overlay=Оверлей releases.desc=Відслідковувати версії проекту (релізи) та завантаження. release.releases=Релізи +release.detail=Деталі релізу +release.tags=Теги release.new_release=Новий реліз release.draft=Чернетка release.prerelease=Пре-реліз release.stable=Стабільний +release.compare=Порівняти release.edit=редагувати release.ahead.commits=%d коміт(ів) release.ahead.target=до %s з моменту цього випуску @@ -1720,12 +2027,20 @@ release.publish=Опублікувати реліз release.save_draft=Зберегти чернетку release.edit_release=Оновити реліз release.delete_release=Видалити реліз +release.delete_tag=Видалити тег release.deletion=Видалити реліз +release.deletion_desc=Видалення релізу видаляє його тільки з Gitea. Git теги, вміст репозиторію і історія залишаться незмінними. Продовжити? release.deletion_success=Реліз, було видалено. +release.deletion_tag_desc=Буде видалено цей тег із репозиторію. Вміст репозиторія та історія залишаться незмінними. Продовжити? +release.deletion_tag_success=Мітка видалена. release.tag_name_already_exist=Реліз з цим ім'ям мітки вже існує. release.tag_name_invalid=Неприпустиме ім'я тега. +release.tag_name_protected=Ім'я тега захищене. +release.tag_already_exist=Цей тег вже використовується. release.downloads=Завантажити release.download_count=Завантаження: %s +release.add_tag_msg=Використовуйте заголовок і зміст релізу як повідомлення як тег повідомлення. +release.add_tag=Створити тільки мітку branch.name=Ім'я гілки branch.search=Пошук гілок @@ -1752,13 +2067,22 @@ branch.restore=Відновити гілку '%s' branch.download=Завантажити гілку '%s' branch.included_desc=Ця гілка є частиною типової гілки branch.included=Включено +branch.create_new_branch=Створити гілку з гілки: +branch.confirm_create_branch=Створити гілку +branch.new_branch=Створити нову гілку +branch.new_branch_from=Створити нову гілку з '%s' +tag.create_tag=Створити тег %s +tag.create_success=Тег '%s' був створений. topic.manage_topics=Керувати тематичними мітками topic.done=Готово topic.count_prompt=Ви не можете вибрати більше 25 тем topic.format_prompt=Теми мають розпочинатися з літери або цифри, можуть містити дефіси (-) і не повинні перевищувати 35 символів. +error.csv.too_large=Не вдається відобразити цей файл, тому що він завеликий. 
+error.csv.unexpected=Не вдається відобразити цей файл, тому що він містить неочікуваний символ в рядку %d і стовпці %d.
+error.csv.invalid_field_count=Не вдається відобразити цей файл, тому що він має неправильну кількість полів у рядку %d.
[org]
org_name_holder=Назва організації
@@ -1796,11 +2120,14 @@ settings.repoadminchangeteam=Адміністратор репозитарію
settings.visibility=Видимість
settings.visibility.public=Публічний
settings.visibility.limited=Обмежений (Видимий лише для користувачів, що ввійшли в систему)
+settings.visibility.limited_shortname=Обмежений
settings.visibility.private=Приватний (Видимий лише членам організації)
+settings.visibility.private_shortname=Приватний
settings.update_settings=Оновити налаштування
settings.update_setting_success=Налаштування організації оновлені.
settings.change_orgname_prompt=Ця зміна змінить посилання на організацію.
+settings.change_orgname_redirect_prompt=Старе ім'я перенаправлятиме, доки його не буде зайнято.
settings.update_avatar_success=Аватар організації оновлений.
settings.delete=Видалити організацію
settings.delete_account=Видалити цю організацію
@@ -1872,6 +2199,7 @@ dashboard=Панель управління
users=Облікові записи користувачів
organizations=Організації
repositories=Репозиторії
+hooks=Веб-хуки
authentication=Джерела автентифікації
emails=Електронні адреси Користувача
config=Конфігурація
@@ -1903,6 +2231,7 @@ dashboard.cron.error=Помилка в Cron: %s: %[3]s
dashboard.cron.finished=Cron: %[1]s завершено
dashboard.delete_inactive_accounts=Видалити всі неактивовані облікові записи
dashboard.delete_inactive_accounts.started=Запущено завдання видалення всі неактивованих облікових записів.
+dashboard.delete_repo_archives=Видалити всі архіви репозиторіїв (ZIP, TAR.GZ і т. д.)
dashboard.delete_repo_archives.started=Запущено завдання видалення всіх архівів репозиторіїв.
dashboard.delete_missing_repos=Видалити всі записи про репозиторії з відсутніми файлами Git
dashboard.delete_missing_repos.started=Запущено завдання видалення всіх репозиторіїв, в яких відсутні файли Git.
@@ -1916,9 +2245,12 @@ dashboard.update_migration_poster_id=Оновити мігровані ID авт
dashboard.git_gc_repos=Виконати очистку сміття для всіх репозиторіїв
dashboard.resync_all_sshkeys=Оновити файл '.ssh/authorized_keys' з SSH ключами Gitea.
dashboard.resync_all_sshkeys.desc=(Не потрібне при використанні вбудованого сервера SSH.)
+dashboard.resync_all_sshprincipals=Оновити файл '.ssh/authorized_principals' з SSH даними користувачів Gitea.
+dashboard.resync_all_sshprincipals.desc=(Не потрібно для вбудованого SSH серверу.)
dashboard.resync_all_hooks=Пересинхронізувати перед-прийнятні, оновлюючі та пост-прийнятні хуки в усіх репозиторіях.
dashboard.reinit_missing_repos=Переініціалізувати усі репозитрії git-файли яких втрачено
dashboard.sync_external_users=Синхронізувати дані зовнішніх користувачів
+dashboard.cleanup_hook_task_table=Очистити таблицю hook_task
dashboard.server_uptime=Uptime серверу
dashboard.current_goroutine=Поточна кількість Goroutines
dashboard.current_memory_usage=Поточне використання пам'яті
@@ -1948,6 +2280,8 @@ dashboard.total_gc_time=Загальна пауза збирача сміття
dashboard.total_gc_pause=Загальна пауза збирача сміття (GC)
dashboard.last_gc_pause=Остання пауза збирача сміття (GC)
dashboard.gc_times=Кількість запусків збирача сміття (GC)
+dashboard.delete_old_actions=Видалити всі старі дії з бази даних
+dashboard.delete_old_actions.started=Видалення всіх старих дій з бази даних розпочато.
users.user_manage_panel=Керування обліковими записами користувачів users.new_account=Створити обліковий запис @@ -1977,6 +2311,7 @@ users.prohibit_login=Вимкнути вхід users.is_admin=Адміністратор users.is_restricted=Обмежений users.allow_git_hook=Може створювати Git хуки +users.allow_git_hook_tooltip=Git хуки виконуються від імені користувача OS сервісу Gitea і мають однаковий рівень доступу до хоста. Як результат, користувачі з доступом до Git-хуків можуть отримати доступ і змінювати всі репозиторії Gitea, а також базу даних, що використовуються в Gitea. Отже, вони також здатні отримати права адміністратора Gitea. users.allow_import_local=Може імпортувати локальні репозиторії users.allow_create_organization=Може створювати організацій users.update_profile=Оновити обліковий запис @@ -1984,6 +2319,7 @@ users.delete_account=Видалити цей обліковий запис users.still_own_repo=Ваш обліковий запис все ще володіє одним або кількома репозиторіями, спочатку вам потрібно видалити або передати їх. users.still_has_org=Цей обліковий запис все ще є учасником однієї або декількох організацій. Для продовження, покиньте або видаліть організації. users.deletion_success=Обліковий запис користувача було видалено. +users.reset_2fa=Скинути 2FA emails.email_manage_panel=Управління поштою користувача emails.primary=Головний @@ -2005,6 +2341,8 @@ orgs.members=Учасники orgs.new_orga=Нова організація repos.repo_manage_panel=Керування репозиторіями +repos.unadopted=Неприйняті репозиторії +repos.unadopted.no_more=Не знайдено більше неприйнятих репозиторіїв repos.owner=Власник repos.name=Назва repos.private=Приватний @@ -2014,8 +2352,13 @@ repos.forks=Форки repos.issues=Проблеми repos.size=Розмір +defaulthooks=Веб-хуки за замовчуванням +defaulthooks.desc=Веб-хуки автоматично створюють HTTP POST-запити до сервера, коли виконуються певні події Gitea. Визначені тут веб-хуки є типовими і копіюються у всі нові сховища. Детальніше читайте в інструкції по використанню web-хуків. +defaulthooks.add_webhook=Додати веб-хук за замовчуванням +defaulthooks.update_webhook=Змінити веб-хук за замовчуванням systemhooks=Системні вебхуки +systemhooks.desc=Веб-хуки автоматично створюють HTTP POST-запити до сервера, коли виконуються певні тригери в Gitea. Визначені веб-хуки є типовими і копіюються у всі нові сховища. Детальніше читайте в інструкції по використанню web-хуків. systemhooks.add_webhook=Додати системний вебхук systemhooks.update_webhook=Оновити системний вебхук @@ -2034,7 +2377,6 @@ auths.host=Хост auths.port=Порт auths.bind_dn=Прив'язати DN auths.bind_password=Прив'язати пароль -auths.bind_password_helper=Попередження: цей пароль зберігається у вигляді простого тексту. Використовуйте обліковий запис тільки для читання, якщо це можливо. auths.user_base=База пошуку користувачів auths.user_dn=DN користувача auths.attribute_username=Атрибут імені користувача @@ -2051,6 +2393,11 @@ auths.filter=Користувацький фільтр auths.admin_filter=Фільтр адміністратора auths.restricted_filter=Обмежуючий фільтр auths.restricted_filter_helper=Залиште пустим, щоб не встановлювати обмеження на жодного з користувачів. Використовуйте зірочку ("*') щоб встановити обмеження на всіх користувачів, які не відповідають фільтру Адміністратора. 
+auths.verify_group_membership=Перевірити членство в групах в LDAP
+auths.group_search_base=Пошукова база груп DN
+auths.valid_groups_filter=Допустимий фільтр груп
+auths.group_attribute_list_users=Атрибут групи зі списком користувачів
+auths.user_attribute_in_group=Атрибути користувача в групі
auths.ms_ad_sa=Атрибути пошуку MS AD
auths.smtp_auth=Тип автентифікації SMTP
auths.smtphost=SMTP хост
@@ -2060,7 +2407,9 @@ auths.allowed_domains_helper=Залиште порожнім, щоб дозво
auths.enable_tls=Увімкнути TLS-шифрування
auths.skip_tls_verify=Пропустити перевірку TLS
auths.pam_service_name=Ім'я служби PAM
+auths.pam_email_domain=Поштовий домен PAM (необов'язково)
auths.oauth2_provider=Постачальник OAuth2
+auths.oauth2_icon_url=URL іконки
auths.oauth2_clientID=ID клієнта (ключ)
auths.oauth2_clientSecret=Ключ клієнта
auths.openIdConnectAutoDiscoveryURL=OpenID Connect URL для автоматизації входу
@@ -2096,6 +2445,7 @@ auths.tip.twitter=Перейдіть на https://dev.twitter.com/apps, ство
auths.tip.discord=Зареєструйте новий додаток на https://discordapp.com/developers/applications/me
auths.tip.gitea=Зареєструйте новий додаток OAuth2. Керівництво можна знайти на https://docs.gitea.io/en-us/oauth2-provider/
auths.tip.yandex=Створіть нову програму в https://oauth.yandex.com/client/new. Виберіть наступні дозволи з "Yandex. assport API": "Доступ до адреси електронної пошти", "Доступ до аватара" і "Доступ до імені користувача, імені та прізвища, статі"
+auths.tip.mastodon=Введіть URL власного екземпляра Mastodon, через який ви хочете автентифікуватися (або використовуйте екземпляр за замовчуванням)
auths.edit=Редагувати джерело автентифікації
auths.activated=Ця аутентифікація активована
auths.new_success=Метод аутентифікації '%s' був доданий.
@@ -2157,6 +2507,7 @@ config.db_path=Шлях
config.service_config=Конфігурація сервісу
config.register_email_confirm=Потрібно підтвердити електронну пошту для реєстрації
config.disable_register=Вимкнути самостійну реєстрацію
+config.allow_only_internal_registration=Дозволити реєстрацію тільки через Gitea
config.allow_only_external_registration=Дозволити реєстрацію тільки через сторонні сервіси
config.enable_openid_signup=Увімкнути самостійну реєстрацію за допомогою OpenID
config.enable_openid_signin=Увімкнути реєстрацію за допомогою OpenID
@@ -2350,6 +2701,9 @@ mirror_sync_delete=синхронізовано й видалено посила
approve_pull_request=`схвалив %s#%[2]s`
reject_pull_request=`запропонував зміни до %s#%[2]s`
publish_release=`опублікував випуск "%[4]s" з %[3]s`
+review_dismissed=`відхилив відгук від %[4]s у %[3]s#%[2]s`
+review_dismissed_reason=Причина:
+create_branch=створено гілку %[3]s у %[4]s
[tool]
ago=%s тому
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index d6661f9ef01a..7c4cadb4dbd1 100644
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -83,6 +83,7 @@ add=添加
add_all=添加所有
remove=移除
remove_all=移除所有
+edit=编辑
write=撰写
preview=预览
@@ -91,11 +92,16 @@ loading=正在加载...
step1=第一步: step2=第二步: +error=错误 error404=您正尝试访问的页面 不存在您尚未被授权 查看该页面。 +never=从不 + [error] occurred=发生错误 report_message=如果您确定这是一个 Gitea bug,请在 GitHub 上搜索问题,并在必要时打开新问题。 +missing_csrf=错误的请求:没有 CSRF 令牌 +invalid_csrf=错误的请求:无效的 CSRF 令牌 [startpage] app_desc=一款极易搭建的自助 Git 服务 @@ -299,7 +305,8 @@ openid_connect_desc=所选的 OpenID URI 未知。在这里关联一个新帐户 openid_register_title=创建新帐户 openid_register_desc=所选的 OpenID URI 未知。在这里关联一个新帐户。 openid_signin_desc=输入您的 OpenID URI。例如: https://anne.me、bob.openid.org.cn 或 gnusocial.net/carry。 -disable_forgot_password_mail=帐户恢复功能已被禁用。请与网站管理员联系。 +disable_forgot_password_mail=由于未设置电子邮件,帐户恢复被禁用。 请联系您的站点管理员。 +disable_forgot_password_mail_admin=帐户恢复仅在设置电子邮件后可用。 请设置电子邮件以启用帐户恢复。 email_domain_blacklisted=您不能使用您的电子邮件地址注册。 authorize_application=应用授权 authorize_redirect_notice=如果您授权此应用,您将会被重定向到 %s。 @@ -313,19 +320,64 @@ password_pwned=此密码出现在 %s 您好, + activate_account=请激活您的帐户 +activate_account.title=%s,请激活您的帐户 +activate_account.text_1=%[1]s 您好,感谢注册 %[2]s ! +activate_account.text_2=请点击以下链接激活您在 %s 的帐户: + activate_email=请验证您的邮箱地址 +activate_email.title=%s,请验证您的电子邮件地址 +activate_email.text=请点击以下链接,以验证你的电子邮件地址在 %s 中 + +register_notify=欢迎来到 Gitea +register_notify.title=%[1]s,欢迎来到 %[2]s +register_notify.text_1=这是您的 %s 注册确认电子邮件 ! +register_notify.text_2=您现在可以以用户名 %s 登录。 +register_notify.text_3=如果此账户已为您创建,请先 设置您的密码。 + reset_password=恢复您的账户 +reset_password.title=%s,您已请求恢复您的帐户 +reset_password.text=请点击以下链接,恢复你在 %s 的账户: + register_success=注册成功 -register_notify=欢迎来到 Gitea + +issue_assigned.pull=@%[1]s 已将代码库 %[3]s 中的合并请求 %[2]s 指派给您 +issue_assigned.issue=@%[1]s 已将代码库 %[3]s 中的工单 %[2]s 指派给您 + +issue.x_mentioned_you=@%s 提到了您: +issue.action.force_push=%[1]s 强制从 %[3]s 推送 %[2]s 至 [4]s。 +issue.action.push_1=@%[1]s 推送了 %[3]d 个提交到 %[2]s +issue.action.push_n=@%[1]s 推送了 %[3]d 个提交到 %[2]s +issue.action.close=@%[1]s 关闭了 #%[2]d. +issue.action.reopen=@%[1]s 重新打开了 #%[2]d. +issue.action.merge=@%[1]s 将 #%[2]d 合并到 #%[3]s。 +issue.action.approve=@%[1]s 批准了此合并请求。 +issue.action.reject=@%[1]s 请求更改此合并请求。 +issue.action.review=@%[1]s 评论了这个合并请求。 +issue.action.review_dismissed=@%[1]s 拒绝了 %[2]s 对此合并请求的上个审核。 +issue.action.ready_for_review=@%[1]s 标记此合并请求已评审通过。 +issue.action.new=@%[1]s 创建了 #%[2]d. +issue.in_tree_path=在 %s 中: release.new.subject=%[2]s 中的 %[1]s 发布了 +release.new.text=@%[1]s 于 %[3]s 发布了 %[2]s +release.title=标题: %s +release.note=注释: +release.downloads=下载: +release.download.zip=源代码 (ZIP) +release.download.targz=源代码 (TAR.GZ) repo.transfer.subject_to=%s 想要将 "%s" 转让给 %s repo.transfer.subject_to_you=%s 想要将 "%s" 转让给你 repo.transfer.to_you=你 +repo.transfer.body=访问 %s 以接受或拒绝转移,亦可忽略此邮件。 repo.collaborator.added.subject=%s 把你添加到了 %s +repo.collaborator.added.text=您已被添加为代码库的协作者: [modal] yes=确认操作 @@ -366,6 +418,7 @@ email_error=不是一个有效的邮箱地址。 url_error=不是一个有效的 URL。 include_error=必须包含子字符串 '%s'。 glob_pattern_error=`匹配模式无效:%s.` +regex_pattern_error=`正则表达式无效:%s.` unknown_error=未知错误: captcha_incorrect=验证码不正确。 password_not_match=密码不匹配。 @@ -542,7 +595,16 @@ ssh_key_been_used=此 SSH 密钥已添加到服务器。 ssh_key_name_used=使用相同名称的SSH公钥已经存在! ssh_principal_been_used=此规则已经加入到了服务器。 gpg_key_id_used=使用相同名称的GPG公钥已经存在! 
-gpg_no_key_email_found=此 GPG 公钥没有使用任何你的电子邮箱地址。 +gpg_no_key_email_found=此 GPG 密钥与您帐户关联的任何已激活电子邮件地址均不匹配。如果您在提供的令牌上签名,它仍然可以被添加。 +gpg_key_matched_identities=匹配的身份: +gpg_key_verified=已验证的密钥 +gpg_key_verified_long=密钥已经用令牌进行了验证,并且可以用来验证匹配此用户任何已激活电子邮件地址的提交,以及匹配此密钥的任何身份。 +gpg_key_verify=验证 +gpg_invalid_token_signature=提供的 GPG 密钥、签名和令牌不匹配或过期。 +gpg_token_required=您必须为下面的令牌提供签名 +gpg_token_help=您可以使用以下方式生成签名: +key_signature_gpg_placeholder=以 '-----BEGIN PGP PUBLIC KEY BLOCK-----' 开头 +verify_gpg_key_success=GPG 密钥 %s 已被验证。 subkeys=子项 key_id=键ID key_name=密钥名称 @@ -673,6 +735,14 @@ email_notifications.onmention=只在被提到时邮件通知 email_notifications.disable=停用邮件通知 email_notifications.submit=邮件通知设置 +visibility=用户可见性 +visibility.public=公开 +visibility.public_tooltip=所有用户可见 +visibility.limited=受限 +visibility.limited_tooltip=仅登录用户可见 +visibility.private=私有 +visibility.private_tooltip=仅对组织成员可见 + [repo] new_repo_helper=仓库包含所有项目文件,包括修订历史。已经在别处有了吗? 迁移代码库 owner=拥有者 @@ -723,7 +793,7 @@ mirror_prune_desc=删除过时的远程跟踪引用 mirror_interval=镜像间隔 (有效时间单位为 "h"、"m"、"s")。0将禁用自动同步。 mirror_interval_invalid=镜像间隔无效。 mirror_address=从URL克隆 -mirror_address_desc=在 Clone 认证部分里输入必要的信息。 +mirror_address_desc=在授权框中输入必要的凭据。 mirror_address_url_invalid=URL无效。请检查您所输入的URL是否正确。 mirror_address_protocol_invalid=提供的 url 无效。只能从 http(s):// 或 git:// 位置进行镜像。 mirror_lfs=大文件存储 (LFS) @@ -731,6 +801,9 @@ mirror_lfs_desc=镜像 LFS 数据。 mirror_lfs_endpoint=LFS 网址 mirror_lfs_endpoint_desc=同步将尝试使用克隆网址来 确定 LFS 服务器。如果仓库 LFS 数据存储在其他位置,你还可以指定自定义网址。 mirror_last_synced=上次同步 +mirror_password_placeholder=(未更改) +mirror_password_blank_placeholder=(未设置) +mirror_password_help=更改用户名以删除已储存的密码。 watchers=关注者 stargazers=称赞者 forks=派生仓库 @@ -747,6 +820,7 @@ delete_preexisting_label=刪除 delete_preexisting=删除已存在的文件 delete_preexisting_content=删除 %s 中的文件 delete_preexisting_success=删除 %s 中未收录的文件 +blame_prior=查看此更改前的 blame transfer.accept=接受转移 transfer.accept_desc=转移到 "%s" @@ -783,7 +857,7 @@ form.reach_limit_of_creation_n=你已经达到了 %d 个仓库的上限。 form.name_reserved=仓库名称 '%s' 是被保留的。 form.name_pattern_not_allowed=仓库名称中不允许使用模式 "%s"。 -need_auth=需要授权验证 +need_auth=授权 migrate_options=迁移选项 migrate_service=迁移服务 migrate_options_mirror_helper=该仓库将是一个 镜像 @@ -817,11 +891,19 @@ migrated_from_fake=从 %[1]s 迁移成功 migrate.migrate=从 %s 迁移 migrate.migrating=正在从 %s 迁移... 
migrate.migrating_failed=从 %s 迁移失败。 +migrate.migrating_failed.error=错误:%s migrate.github.description=从 Github.com 或者 Github Enterprise 迁移数据 migrate.git.description=从 Git 迁移数据 migrate.gitlab.description=从 GitLab.com 或者 自部署 GitLab 迁移数据 migrate.gitea.description=从 Gitea.com 或 自托管 Gitea 服务器迁移数据。 migrate.gogs.description=正从 notabug.org 或其他自托管 Gogs 服务器迁移数据。 +migrate.migrating_git=迁移Git数据 +migrate.migrating_topics=迁移主题 +migrate.migrating_milestones=迁移里程碑 +migrate.migrating_labels=迁移标签 +migrate.migrating_releases=迁移发布 +migrate.migrating_issues=迁移工单 +migrate.migrating_pulls=迁移合并请求 mirror_from=镜像自地址 forked_from=派生自 @@ -854,6 +936,7 @@ branch=分支 tree=目录树 clear_ref=`清除当前引用` filter_branch_and_tag=过滤分支或标签 +find_tag=查找Git标签 branches=分支列表 tags=标签列表 issues=工单 @@ -1051,8 +1134,8 @@ issues.label_templates.use=使用标签集 issues.label_templates.fail_to_load_file=加载标签模板文件 '%s' 时发生错误:%v issues.add_label=于 %[2]s 添加了标签 %[1]s issues.add_labels=于 %s 添加 %s 标签 -issues.remove_label=已删除 %s 标签 %s -issues.remove_labels=已删除 %s 标签 %s +issues.remove_label=于 %[2]s 删除了标签 %[1]s +issues.remove_labels=于 %[2]s 删除了标签 %[1]s issues.add_remove_labels=于 %[3]s 添加了标签 %[1]s ,删除了标签 %[2]s issues.add_milestone_at=`于 %[2]s 添加了里程碑 %[1]s` issues.add_project_at=`将此添加到 %s 项目 %s` @@ -1284,6 +1367,9 @@ issues.review.resolved_by=标记问题为已解决 issues.assignee.error=因为未知原因,并非所有的指派都成功。 issues.reference_issue.body=内容 +compare.compare_base=基准分支 +compare.compare_head=比较 + pulls.desc=启用合并请求和代码评审。 pulls.new=创建合并请求 pulls.compare_changes=创建合并请求 @@ -1302,7 +1388,7 @@ pulls.change_target_branch_at=将目标分支从 %s 更改为 %s % pulls.tab_conversation=对话内容 pulls.tab_commits=代码提交 pulls.tab_files=文件变动 -pulls.reopen_to_merge=请重新打开此拉请求执行合并。 +pulls.reopen_to_merge=请重新创建此合并请求。 pulls.cant_reopen_deleted_branch=无法重新打开此合并请求,因为分支已删除。 pulls.merged=已合并 pulls.merged_as=该合并请求已作为 %[2]s 被合并。 @@ -1311,7 +1397,10 @@ pulls.manually_merged_as=合并请求已被手动合并为 标题以 %s 开头以免合并请求意外合并。` -pulls.cannot_merge_work_in_progress=这个合并请求被标记为尚未完成的工作。完成后请从标题中移除%s前缀。 +pulls.cannot_merge_work_in_progress=此合并请求被标记为正在进行的工作。 +pulls.still_in_progress=仍在进行中? +pulls.add_prefix=添加 %s 前缀 +pulls.remove_prefix=删除 %s 前缀 pulls.data_broken=此合并请求因为派生仓库信息缺失而中断。 pulls.files_conflicted=此合并请求有变更与目标分支冲突。 pulls.is_checking=正在进行合并冲突检测,请稍后再试。 @@ -1524,7 +1613,7 @@ search.fuzzy=模糊 search.match=匹配 search.results=在 %s 中搜索 "%s" 的结果 -settings=仓库设置 +settings=设置 settings.desc=设置是你可以管理仓库设置的地方 settings.options=仓库 settings.collaboration=协作者 @@ -1533,10 +1622,19 @@ settings.collaboration.write=可写权限 settings.collaboration.read=可读权限 settings.collaboration.owner=所有者 settings.collaboration.undefined=未定义 -settings.hooks=管理 Web 钩子 +settings.hooks=Web 钩子 settings.githooks=管理 Git 钩子 settings.basic_settings=基本设置 settings.mirror_settings=镜像设置 +settings.mirror_settings.docs=将你的项目设置成自动从其它存储库推送或拉取变更。分支、标签以及提交将会自动同步。如何镜像存储库? 
+settings.mirror_settings.mirrored_repository=镜像库 +settings.mirror_settings.direction=方向 +settings.mirror_settings.direction.pull=拉取 +settings.mirror_settings.direction.push=推送 +settings.mirror_settings.last_update=最后更新 +settings.mirror_settings.push_mirror.none=未配置推送镜像 +settings.mirror_settings.push_mirror.remote_url=Git 远程存储库链接 +settings.mirror_settings.push_mirror.add=添加推送镜像 settings.sync_mirror=同步 settings.mirror_sync_in_progress=镜像同步正在进行中,请稍后后再试。 settings.email_notifications.enable=启用邮件通知 @@ -1545,6 +1643,7 @@ settings.email_notifications.disable=停用邮件通知 settings.email_notifications.submit=邮件通知设置 settings.site=网站 settings.update_settings=更新仓库设置 +settings.branches.update_default_branch=更新默认分支 settings.advanced_settings=高级设置 settings.wiki_desc=启用仓库百科 settings.use_internal_wiki=使用内置百科 @@ -1574,6 +1673,7 @@ settings.pulls.allow_rebase_merge_commit=启用变基显式合并 (--no-ff) settings.pulls.allow_squash_commits=启用Squash合并提交 settings.pulls.allow_manual_merge=允许将合并请求标记为手动合并 settings.pulls.enable_autodetect_manual_merge=启用自动检测手动合并 (注意:在某些特殊情况下可能发生错误判断) +settings.pulls.default_delete_branch_after_merge=默认合并后删除合并请求分支 settings.projects_desc=启用仓库项目 settings.admin_settings=管理员设置 settings.admin_enable_health_check=启用仓库健康检查 (git fsck) @@ -1601,6 +1701,7 @@ settings.transfer_form_title=输入仓库名称以做确认: settings.transfer_in_progress=当前正在进行转让。 如果你想将此代码库转让给另一个用户,请取消它。 settings.transfer_notices_1=-如果将其传输给单个用户, 您将失去对存储库的访问权限。 settings.transfer_notices_2=-如果将其转移到您 (共同) 拥有的组织,您可以继续访问该仓库。 +settings.transfer_notices_3=- 如果存储库是私有的并且被转移给某个用户,那么此操作可以确保该用户至少具有读权限(以及必要时的更改权限)。 settings.transfer_owner=新拥有者 settings.transfer_perform=执行转让 settings.transfer_started=该代码库已被标记为转让并等待来自 %s 的确认 @@ -1724,7 +1825,7 @@ settings.event_pull_request_review_desc=合并请求被批准、拒绝或提出 settings.event_pull_request_sync=合并请求被同步 settings.event_pull_request_sync_desc=合并请求被同步。 settings.branch_filter=分支过滤 -settings.branch_filter_desc=推送、创建,删除分支事件白名单,支持匹配符。如果为空或者 *,所有分支的事件均被触发。语法参见 github.com/gobwas/glob 。示例: Master, {master,release*}。 +settings.branch_filter_desc=推送、创建,删除分支事件的分支白名单,使用 glob 模式匹配指定。若为空或 *,则将报告所有分支的事件。语法文档见 github.com/gobwas/glob。示例:master,{master,release*}。 settings.active=激活 settings.active_helper=触发事件的信息将发送到此 webhook 网址。 settings.add_hook_success=Web 钩子添加成功! @@ -1743,7 +1844,7 @@ settings.add_telegram_hook_desc=将 Telegram 集成到您的仓 settings.add_matrix_hook_desc=将 Matrix 集成到您的仓库中。 settings.add_msteams_hook_desc=将 Microsoft Teams 集成到您的仓库中。 settings.add_feishu_hook_desc=将 Feishu 集成到您的仓库中。 -settings.deploy_keys=管理部署密钥 +settings.deploy_keys=部署密钥 settings.add_deploy_key=添加部署密钥 settings.deploy_key_desc=部署密钥具有对仓库的只读拉取权限。 settings.is_writable=启用写权限 @@ -1757,7 +1858,7 @@ settings.add_key_success=部署密钥 '%s' 添加成功。 settings.deploy_key_deletion=删除部署密钥 settings.deploy_key_deletion_desc=删除部署密钥将吊销对此存储库的访问权限。继续? settings.deploy_key_deletion_success=部署密钥已删除。 -settings.branches=分支列表 +settings.branches=分支 settings.protected_branch=分支保护 settings.protected_branch_can_push=允许推吗? 
settings.protected_branch_can_push_yes=你可以推 @@ -1794,7 +1895,7 @@ settings.dismiss_stale_approvals_desc=当新的提交更改合并请求内容被 settings.require_signed_commits=需要签名提交 settings.require_signed_commits_desc=拒绝推送未签名或无法验证的提交到分支 settings.protect_protected_file_patterns=受保护的文件模式(使用分号分隔) -settings.protect_protected_file_patterns_desc=即使用户有权添加、编辑或删除此分支中的文件,不允许直接更改受保护文件。 可以使用分号 ('\;') 分隔多个模式。 见 github.com/gobwas/glob 文档以获取图案语法。例如: .drone.yml, /docs/**/*.txt +settings.protect_protected_file_patterns_desc=即使用户有权在此分支中添加、编辑或删除文件,也不允许直接更改受保护文件。 可以使用分号分隔多个模式 ('\;')。语法文档见 github.com/gobwas/glob。示例:.drone.yml/docs/**/*.txt。 settings.add_protected_branch=启用保护 settings.delete_protected_branch=禁用保护 settings.update_protect_branch_success=分支 "%s" 的分支保护已更新。 @@ -1813,6 +1914,16 @@ settings.choose_branch=选择一个分支... settings.no_protected_branch=没有受保护的分支 settings.edit_protected_branch=编辑 settings.protected_branch_required_approvals_min=所需的审批数不能为负数。 +settings.tags=标签 +settings.tags.protection=Git标签保护 +settings.tags.protection.pattern=Git标签模式 +settings.tags.protection.allowed=允许列表 +settings.tags.protection.allowed.users=允许的账号 +settings.tags.protection.allowed.teams=允许的团队 +settings.tags.protection.allowed.noone=无 +settings.tags.protection.create=保护Git标签 +settings.tags.protection.none=没有受保护的Git标签 +settings.tags.protection.pattern.description=你可以使用单个名称或 glob 模式匹配或正则表达式来匹配多个标签。了解更多请阅读 受保护Git标签指南 settings.bot_token=Bot 令牌 settings.chat_id=聊天 ID settings.matrix.homeserver_url=主服务器网址 @@ -1826,6 +1937,7 @@ settings.archive.success=仓库已成功归档。 settings.archive.error=仓库在归档时出现异常。请通过日志获取详细信息。 settings.archive.error_ismirror=请不要对镜像仓库归档,谢谢! settings.archive.branchsettings_unavailable=已归档仓库无法进行分支设置。 +settings.archive.tagsettings_unavailable=已归档仓库的Git标签设置不可用。 settings.unarchive.button=撤销仓库归档 settings.unarchive.header=撤销此仓库归档 settings.unarchive.text=取消存档将恢复仓库接收提交,推送,新工单和合并请求。 @@ -1885,6 +1997,7 @@ diff.file_image_width=宽度 diff.file_image_height=高度 diff.file_byte_size=大小 diff.file_suppressed=文件差异内容过多而无法显示 +diff.file_suppressed_line_too_long=文件差异因一行或多行过长而隐藏 diff.too_many_files=部分文件因为文件数量过多而无法显示 diff.comment.placeholder=留下评论 diff.comment.markdown_info=支持使用Markdown格式。 @@ -1912,6 +2025,7 @@ release.new_release=发布新版 release.draft=草稿 release.prerelease=预发行 release.stable=稳定 +release.compare=比较 release.edit=编辑 release.ahead.commits=%d 次提交 release.ahead.target=到 %s 自发布后 @@ -1938,6 +2052,7 @@ release.deletion_tag_desc=将从仓库中删除此 Git标签。仓库内容和 release.deletion_tag_success=该 Git标签 已经被删除 release.tag_name_already_exist=使用此标签名称的发布版本已经存在。 release.tag_name_invalid=标签名称无效。 +release.tag_name_protected=Git标签名称已受保护。 release.tag_already_exist=此 Git标签 名称已存在 release.downloads=下载附件 release.download_count=下载:%s @@ -1969,6 +2084,10 @@ branch.restore=恢复分支 '%s' branch.download=下载分支 '%s' branch.included_desc=此分支是默认分支的一部分 branch.included=已包含 +branch.create_new_branch=从下列分支创建分支: +branch.confirm_create_branch=创建分支 +branch.new_branch=创建新分支 +branch.new_branch_from=从 %s 创建新分支 tag.create_tag=创建标签 %s tag.create_success=标签 '%s' 已创建。 @@ -2178,6 +2297,8 @@ dashboard.total_gc_time=GC 暂停时间总量 dashboard.total_gc_pause=GC 暂停时间总量 dashboard.last_gc_pause=上次 GC 暂停时间 dashboard.gc_times=GC 执行次数 +dashboard.delete_old_actions=从数据库中删除所有旧操作记录 +dashboard.delete_old_actions.started=已开始从数据库中删除所有旧操作记录。 users.user_manage_panel=用户帐户管理 users.new_account=创建新帐户 @@ -2273,7 +2394,6 @@ auths.host=主机 auths.port=端口 auths.bind_dn=绑定 DN auths.bind_password=绑定密码 -auths.bind_password_helper=警告:密码将会被明文存储。如果可能请使用只读账号。 auths.user_base=用户搜索基准 auths.user_dn=用户 DN auths.attribute_username=用户名属性 @@ -2304,6 +2424,7 @@ 
auths.allowed_domains_helper=置空将允许所有域名,每个域名用逗号 auths.enable_tls=启用 TLS 加密 auths.skip_tls_verify=忽略 TLS 验证 auths.pam_service_name=PAM 服务名称 +auths.pam_email_domain=PAM 电子邮件域(可选) auths.oauth2_provider=OAuth2 提供程序 auths.oauth2_icon_url=图标 URL auths.oauth2_clientID=客户端 ID (键) @@ -2403,6 +2524,7 @@ config.db_path=数据库路径 config.service_config=服务配置 config.register_email_confirm=需要电子邮件确认注册 config.disable_register=禁止用户注册 +config.allow_only_internal_registration=只允许通过 Gitea 进行注册 config.allow_only_external_registration=仅允许通过外部服务注册 config.enable_openid_signup=启用 OpenID 自注册 config.enable_openid_signin=启用 OpenID 登录 diff --git a/options/locale/locale_zh-HK.ini b/options/locale/locale_zh-HK.ini index 716f90a32e43..a1717aa4f57e 100644 --- a/options/locale/locale_zh-HK.ini +++ b/options/locale/locale_zh-HK.ini @@ -44,6 +44,7 @@ cancel=取消 + [error] [startpage] @@ -107,12 +108,19 @@ openid_connect_title=連接到現有帳戶 openid_register_title=建立新帳戶 [mail] + activate_account=請啟用您的帳戶 + activate_email=請驗證您的郵箱地址 -register_success=註冊成功 + register_notify=歡迎來到 Gitea +register_success=註冊成功 + + + + [modal] @@ -242,6 +250,7 @@ delete_account=刪除當前帳戶 confirm_delete_account=確認刪除帳戶 + [repo] owner=擁有者 repo_name=儲存庫名稱 @@ -408,6 +417,7 @@ issues.attachment.download=`點擊下載 '%s'` issues.subscribe=訂閱 issues.unsubscribe=取消訂閱 + pulls.new=建立合併請求 pulls.filter_branch=過濾分支 pulls.no_results=未找到結果 diff --git a/options/locale/locale_zh-TW.ini b/options/locale/locale_zh-TW.ini index 8edbabe757f9..7b49562c8ef2 100644 --- a/options/locale/locale_zh-TW.ini +++ b/options/locale/locale_zh-TW.ini @@ -83,6 +83,7 @@ add=增加 add_all=全部增加 remove=移除 remove_all=全部移除 +edit=編輯 write=撰寫 preview=預覽 @@ -91,13 +92,19 @@ loading=載入中… step1=第一步: step2=第二步: +error=錯誤 error404=您正嘗試訪問的頁面 不存在您尚未被授權 查看該頁面。 +never=從來沒有 + [error] occurred=發生錯誤 report_message=如果你確定這是一個 Gitea 的 bug,請去 GitHub 搜尋相關的問題,如果有需要你也可以開一個新的問題 +missing_csrf=Bad Request: no CSRF token present +invalid_csrf=Bad Request: Invalid CSRF token [startpage] +app_desc=一套極易架設的 Git 服務 install=安裝容易 install_desc=簡單地執行您平台的二進位檔,或是使用 Docker,你也可以從套件管理員安裝。 platform=跨平台 @@ -171,7 +178,7 @@ federated_avatar_lookup=啟用 Federated Avatars federated_avatar_lookup_popup=使用 Libravatar 以啟用 Federated Avatar 查詢服務 disable_registration=關閉註冊功能 disable_registration_popup=關閉註冊功能,只有管理員可以新增帳戶。 -allow_only_external_registration_popup=僅允許通過外部服務進行註冊 +allow_only_external_registration_popup=只允許從外部服務註冊 openid_signin=啟用 OpenID 登入 openid_signin_popup=啟用 OpenID 登入 openid_signup=啟用 OpenID 註冊 @@ -187,7 +194,7 @@ admin_password=管理員密碼 confirm_password=確認密碼 admin_email=電子信箱 install_btn_confirm=安裝 Gitea -test_git_failed=無法識別 'git' 命令:%v +test_git_failed=無法識別「git」命令:%v sqlite3_not_available=您目前的版本不支援 SQLite3,請從 %s 下載官方的預先編譯版本(不是 gobuild 版本)。 invalid_db_setting=資料庫設定不正確: %v invalid_repo_path=儲存庫根目錄設定不正確:%v @@ -245,7 +252,7 @@ repo_no_results=沒有找到符合的儲存庫。 user_no_results=沒有找到符合的使用者。 org_no_results=沒有找到符合的組織。 code_no_results=找不到符合您關鍵字的原始碼。 -code_search_results=搜尋結果:'%s' +code_search_results=「%s」的搜尋結果 code_last_indexed_at=最後索引 %s [auth] @@ -259,10 +266,10 @@ forgot_password_title=忘記密碼 forgot_password=忘記密碼? 
sign_up_now=還沒有帳戶?馬上註冊。 sign_up_successful=帳戶已成功建立。 -confirmation_mail_sent_prompt=一封新的確認信已發送至 %s。請檢查您的收件匣,並在 %s 內完成註冊作業。 +confirmation_mail_sent_prompt=新的確認信已發送至 %s。請在 %s內檢查您的收件匣並完成註冊作業。 must_change_password=更新您的密碼 allow_password_change=要求使用者更改密碼 (推薦) -reset_password_mail_sent_prompt=一封確認信已發送至 %s。請檢查您的收件匣,並在 %s 內完成帳戶救援作業。 +reset_password_mail_sent_prompt=確認信已發送至 %s。請在 %s內檢查您的收件匣並完成帳戶救援作業。 active_your_account=啟用您的帳戶 account_activated=帳戶已啟用 prohibit_login=禁止登入 @@ -298,7 +305,8 @@ openid_connect_desc=所選的 OpenID URI 未知。在這裡連結一個新帳戶 openid_register_title=建立新帳戶 openid_register_desc=所選的 OpenID URI 未知。在這裡連結一個新帳戶。 openid_signin_desc=輸入您的 OpenID URI。例如: https://anne.me、bob.openid.org.cn 或 gnusocial.net/carry。 -disable_forgot_password_mail=已停用帳戶救援功能。請與網站管理員聯絡。 +disable_forgot_password_mail=由於未設定電子郵件功能,帳戶救援功能已被停用。請與網站管理員聯絡。 +disable_forgot_password_mail_admin=帳戶救援功能需要設定電子郵件功能才能使用。請設定電子郵件功能以啟用帳戶救援功能。 email_domain_blacklisted=您無法使用您的電子信箱註冊帳號。 authorize_application=授權應用程式 authorize_redirect_notice=如果您授權此應用程式,您將會被重新導向至 %s。 @@ -312,19 +320,63 @@ password_pwned=您選擇的密碼已被列於設定您的密碼。 + reset_password=救援您的帳戶 +reset_password.title=%s,您已請求帳戶救援 +reset_password.text=請在 %s內點擊下列連結以救援您的帳戶: + register_success=註冊成功 -register_notify=歡迎來到 Gitea + +issue_assigned.pull=@%[1]s 將儲存庫 %[3]s 的合併請求 %[2]s 指派給您。 +issue_assigned.issue=@%[1]s 將儲存庫 %[3]s 的問題 %[2]s 指派給您。 + +issue.x_mentioned_you=@%s 提到了您: +issue.action.force_push=%[1]s 強制推送了 %[2]s 自 %[3]s 至 %[4]s。 +issue.action.push_n=@%[1]s 推送了 %[3]d 個提交到 %[2]s +issue.action.close=@%[1]s 關閉了 #%[2]d。 +issue.action.reopen=@%[1]s 重新開放了 #%[2]d。 +issue.action.merge=@%[1]s 合併了 #%[2]d 到 %[3]s。 +issue.action.approve=@%[1]s 核可了此合併請求。 +issue.action.reject=@%[1]s 請求更改此合併請求。 +issue.action.review=@%[1]s 在此合併請求上留言。 +issue.action.review_dismissed=@%[1]s 取消了 %[2]s 對此合併請求的上一個審核。 +issue.action.ready_for_review=@%[1]s 標記了此合併請求為準備好供審核。 +issue.action.new=@%[1]s 建立了 #%[2]d。 +issue.in_tree_path=在 %s 中: release.new.subject=%[2]s 中的 %[1]s 發佈了 +release.new.text=@%[1]s 於 %[3]s 發佈了 %[2]s +release.title=標題:%s +release.note=說明: +release.downloads=下載: +release.download.zip=原始碼(ZIP) +release.download.targz=原始碼(TAR.GZ) repo.transfer.subject_to=%s 想要把「%s」轉移給 %s repo.transfer.subject_to_you=%s 想要把「%s」轉移給您 repo.transfer.to_you=您 +repo.transfer.body=請造訪 %s 以接受或拒絕轉移,您也可以忽略它。 repo.collaborator.added.subject=%s 把您加入到 %s +repo.collaborator.added.text=您已被新增為儲存庫的協作者: [modal] yes=確認操作 @@ -334,7 +386,7 @@ modify=更新 [form] UserName=帳號 RepoName=儲存庫名稱 -Email=郵箱地址 +Email=電子信箱 Password=密碼 Retype=再次輸入密碼 SSHTitle=SSH 金鑰名稱 @@ -342,7 +394,7 @@ HttpsUrl=HTTPS URL 地址 PayloadUrl=推送地址 TeamName=團隊名稱 AuthName=認證名稱 -AdminEmail=管理員郵箱 +AdminEmail=管理員電子信箱 NewBranchName=新的分支名稱 CommitSummary=提交摘要 @@ -354,17 +406,18 @@ Content=內容 SSPISeparatorReplacement=分隔符 SSPIDefaultLanguage=預設語言 -require_error=不能為空。 +require_error=` 不能為空。` alpha_dash_error=`應該只包含英文字母、數字、破折號 ("-")、和底線 ("_") 字元。` alpha_dash_dot_error=`應該只包含英文字母、數字、破折號 ("-")、下底線("_")和小數點 (".") 字元。` git_ref_name_error=` 必須是格式正確的 Git 參考名稱。` -size_error=長度必須為 %s。 -min_size_error=長度最小為 %s 個字符。 -max_size_error=長度最大為 %s 個字符。 -email_error=不是一個有效的郵箱地址。 -url_error=不是一個有效的 URL。 -include_error=必須包含子字符串 '%s'。 -glob_pattern_error=` glob 比對模式無效: %s.` +size_error=` 長度必須為 %s。` +min_size_error=` 長度最小為 %s 個字元。` +max_size_error=` 長度最大為 %s 個字元。` +email_error=` 是無效的電子信箱。` +url_error=` 是無效的 URL。` +include_error=` 必須包含子字串「%s」。 +glob_pattern_error=` glob 比對模式無效:%s.` +regex_pattern_error=` 正規表示式模式無效:%s.` unknown_error=未知錯誤: captcha_incorrect=驗證碼不正確。 password_not_match=密碼錯誤。 @@ -384,7 +437,7 @@ team_name_been_taken=團隊名稱已被使用。 
team_no_units_error=請至少選擇一個儲存庫區域。 email_been_used=此電子信箱已被使用 email_invalid=此電子信箱無效。 -openid_been_used=OpenID 位址 '%s' 已被使用。 +openid_been_used=OpenID 位址「%s」已被使用。 username_password_incorrect=帳號或密碼不正確 password_complexity=密碼複雜度沒有通過以下的要求: password_lowercase_one=至少要有一個小寫字母 @@ -430,7 +483,7 @@ disabled_public_activity=這個使用者已對外隱藏動態 form.name_reserved=帳號「%s」是被保留的。 form.name_pattern_not_allowed=帳號不可包含字元「%s」。 -form.name_chars_not_allowed=使用者名稱 '%s' 包含無效字元。 +form.name_chars_not_allowed=使用者名稱「%s」包含無效字元。 [settings] profile=個人資料 @@ -509,13 +562,13 @@ email_deletion_success=該電子信箱已被刪除 theme_update_success=已更新佈景主題。 theme_update_error=選取的佈景主題不存在。 openid_deletion=移除 OpenID 位址 -openid_deletion_desc=從您的帳戶刪除此 OpenID 位址將會無法使用它進行登入。你確定要繼續嗎? +openid_deletion_desc=從您的帳戶刪除此 OpenID 位址將會無法使用它進行登入。是否繼續? openid_deletion_success=該 OpenID 已被刪除 add_new_email=新增電子信箱 add_new_openid=新增 OpenID URI add_email=新增電子信箱 add_openid=新增 OpenID URI -add_email_confirmation_sent=一封新的確認郵件已發送至 '%s',請檢查您的收件匣並在 %s 內確認您的電郵地址。 +add_email_confirmation_sent=確認信已發送至「%s」,請在 %s內檢查您的收件匣並確認您的電子信箱。 add_email_success=已加入新的電子信箱。 email_preference_set_success=已套用郵件偏好設定 add_openid_success=該 OpenID 已添加。 @@ -541,7 +594,6 @@ ssh_key_been_used=此 SSH 金鑰已添加到伺服器。 ssh_key_name_used=已有相同名稱的 SSH 金鑰存在於您的帳戶。 ssh_principal_been_used=此伺服器已有名為「%s」的主體。 gpg_key_id_used=已存在具有相同 ID 的 GPG 金鑰。 -gpg_no_key_email_found=此 GPG 金鑰不適用於您的任何電子信箱。 subkeys=次金鑰 key_id=金鑰 ID key_name=金鑰名稱 @@ -596,7 +648,7 @@ manage_oauth2_applications=管理 OAuth2 應用程式 edit_oauth2_application=編輯 OAuth2 應用程式 oauth2_applications_desc=OAuth2 應用程式讓您的第三方應用程式安全地驗證此 Gitea 中的使用者。 remove_oauth2_application=刪除 OAuth2 應用程式 -remove_oauth2_application_desc=刪除 OAuth2 應用程式將會撤銷所有已簽署的 access token 存取權。繼續嗎? +remove_oauth2_application_desc=刪除 OAuth2 應用程式將會撤銷所有已簽署的 Access Token 存取權。是否繼續? remove_oauth2_application_success=已刪除應用程式。 create_oauth2_application=新增 OAuth2 應用程式 create_oauth2_application_button=建立應用程式 @@ -615,7 +667,7 @@ oauth2_regenerate_secret_hint=遺失您的密鑰? oauth2_client_secret_hint=請備份您的祕鑰。祕鑰在您離開這個頁面後將不會再顯示。 oauth2_application_edit=編輯 oauth2_application_create_description=OAuth2 應用程式讓您的第三方應用程式可以存取此 Gitea 上的帳戶。 -oauth2_application_remove_description=刪除 OAuth2 應用會拒絕它存取此 Gitea 上已授權的帳戶。繼續嗎? +oauth2_application_remove_description=刪除 OAuth2 應用會拒絕它存取此 Gitea 上已授權的帳戶。是否繼續? authorized_oauth2_applications=已授權的 OAuth2 應用程式 authorized_oauth2_applications_description=您已授權給這些第三方應用程式存取您個人 Gitea 帳戶。請對不再需要的應用程式撤銷存取權。 @@ -648,7 +700,7 @@ u2f_register_key=新增安全密鑰 u2f_nickname=暱稱 u2f_press_button=按下安全密鑰上的密碼進行註冊。 u2f_delete_key=移除安全密鑰 -u2f_delete_key_desc=如果刪除安全金鑰,將不能再使用它登入。確定要刪除嗎? +u2f_delete_key_desc=如果刪除安全金鑰,將不能再使用它登入。是否繼續? 
manage_account_links=管理已連結的帳戶 manage_account_links_desc=這些外部帳戶已連結到您的 Gitea 帳戶。 @@ -672,8 +724,16 @@ email_notifications.onmention=只在被提到時傳送郵件通知 email_notifications.disable=關閉郵件通知 email_notifications.submit=套用郵件偏好設定 +visibility=使用者瀏覽權限 +visibility.public=公開 +visibility.public_tooltip=對所有人公開 +visibility.limited=受限 +visibility.limited_tooltip=只有登入的使用者才能看到 +visibility.private=私人 +visibility.private_tooltip=只有組織成員才能看到 + [repo] -new_repo_helper=儲存庫包含所以專案檔案,包含修訂歷史。已經存放於別處了嗎?遷移儲存庫。 +new_repo_helper=儲存庫包含所有專案檔案,包含修訂歷史。已經存放於別處了嗎?遷移儲存庫。 owner=擁有者 owner_helper=組織可能因為儲存庫數量上限而未列入此選單。 repo_name=儲存庫名稱 @@ -722,7 +782,7 @@ mirror_prune_desc=刪除過時的遠端追蹤參考 mirror_interval=鏡像間隔(有效時間單位為 'h'、'm'、's')。設為 0 以停用自動同步。 mirror_interval_invalid=鏡像週期無效 mirror_address=從 URL Clone -mirror_address_desc=在 Clone 授權資訊中填入必要的資料。 +mirror_address_desc=在授權資訊中填入必要的資料。 mirror_address_url_invalid=提供的網址無效。請檢查您輸入的網址是否正確。 mirror_address_protocol_invalid=提供的網址無效。只能從 http(s):// 或是 git:// 位址鏡像儲存庫。 mirror_lfs=Large File Storage (LFS) @@ -730,6 +790,9 @@ mirror_lfs_desc=啟動 LFS 檔案的鏡像功能。 mirror_lfs_endpoint=LFS 端點 mirror_lfs_endpoint_desc=同步將會嘗試使用 Clone URL 來確認 LFS 伺服器。如果存儲庫的 LFS 資料放在其它地方,您也可以指定自訂的端點。 mirror_last_synced=上次同步 +mirror_password_placeholder=(未變更) +mirror_password_blank_placeholder=(未設定) +mirror_password_help=修改帳號以清除已儲存的密碼。 watchers=關注者 stargazers=占星術師 forks=Fork @@ -746,6 +809,7 @@ delete_preexisting_label=刪除 delete_preexisting=刪除既有的檔案 delete_preexisting_content=刪除 %s 中的檔案 delete_preexisting_success=刪除 %s 中未接管的檔案 +blame_prior=檢視此變更前的 Blame transfer.accept=同意轉移 transfer.accept_desc=轉移到「%s」 @@ -779,10 +843,10 @@ archive.pull.nocomment=此存儲庫已封存,您不能在合併請求上留言 form.reach_limit_of_creation_1=您已經達到了您儲存庫的數量上限 (%d 個)。 form.reach_limit_of_creation_n=您已經達到了您儲存庫的數量上限 (%d 個)。 -form.name_reserved=儲存庫名稱 '%s' 是預留的。 -form.name_pattern_not_allowed=儲存庫名稱無法使用 "%s"。 +form.name_reserved=儲存庫名稱「%s」是被保留的。 +form.name_pattern_not_allowed=儲存庫名稱不可包含字元「%s」。 -need_auth=Clone 授權資訊 +need_auth=授權 migrate_options=遷移選項 migrate_service=遷移服務 migrate_options_mirror_helper=將此儲存庫設定為鏡像儲存庫 @@ -816,11 +880,19 @@ migrated_from_fake=已從 %[1]s 遷移 migrate.migrate=從 %s 遷移 migrate.migrating=正在從 %s 遷移... migrate.migrating_failed=從 %s 遷移失敗 +migrate.migrating_failed.error=錯誤:%s migrate.github.description=從 Github.com 或 Github Enterprise 遷移資料。 migrate.git.description=從 Git 服務遷移或鏡像資料。 migrate.gitlab.description=從 GitLab.com 或自託管的 Gitlab 伺服器遷移資料。 migrate.gitea.description=從 Gitea.com 或自託管的 Gitea 伺服器遷移資料。 migrate.gogs.description=從 notabug.org 或自託管的 Gogs 伺服器遷移資料。 +migrate.migrating_git=正在遷移 Git 資料 +migrate.migrating_topics=正在遷移主題 +migrate.migrating_milestones=正在遷移里程碑 +migrate.migrating_labels=正在遷移標籤 +migrate.migrating_releases=正在遷移版本發佈 +migrate.migrating_issues=正在遷移問題 +migrate.migrating_pulls=正在遷移合併請求 mirror_from=鏡像自 forked_from=fork 自 @@ -853,6 +925,7 @@ branch=分支 tree=目錄樹 clear_ref=`清除目前的參考` filter_branch_and_tag=過濾分支或標籤 +find_tag=尋找標籤 branches=分支 tags=標籤 issues=問題 @@ -877,7 +950,7 @@ file_view_raw=查看原始文件 file_permalink=永久連結 file_too_large=檔案太大,無法顯示。 video_not_supported_in_browser=您的瀏覽器不支援使用 HTML5 播放影片。 -audio_not_supported_in_browser=您的瀏覽器不支援 HTML5 'audio' 標籤 +audio_not_supported_in_browser=您的瀏覽器不支援 HTML5 的「audio」標籤 stored_lfs=已使用 Git LFS 儲存 symbolic_link=符號連結 commit_graph=提交線圖 @@ -910,9 +983,9 @@ editor.cancel_lower=取消 editor.commit_signed_changes=提交簽署過的變更 editor.commit_changes=提交變更 editor.add_tmpl=新增「」 -editor.add=新增 '%s' -editor.update=更新 '%s' -editor.delete=刪除 '%s' +editor.add=新增「%s」 +editor.update=更新「%s」 +editor.delete=刪除「%s」 editor.commit_message_desc=(選填)加入詳細說明... 
editor.signoff_desc=在提交訊息底部加入提交者的「Signed-off-by」資訊。 editor.commit_directly_to_this_branch=直接提交到 %s 分支。 @@ -924,7 +997,7 @@ editor.cancel=取消 editor.filename_cannot_be_empty=檔案名稱不能為空。 editor.filename_is_invalid=檔名無效:%s editor.branch_does_not_exist=此儲存庫沒有名為「%s」的分支。 -editor.branch_already_exists='%s' 已存在於此存儲庫。 +editor.branch_already_exists=此儲存庫已有名為「%s」的分支。 editor.directory_is_a_file=目錄名稱「%s」已被此儲存庫的檔案使用。 editor.file_is_a_symlink=「%s」是符號連結。無法在網頁編輯器中修改符號連結。 editor.filename_is_a_directory=檔案名稱「%s」已被此儲存庫的目錄使用。 @@ -941,9 +1014,9 @@ editor.push_rejected_no_message=該變更被伺服器拒絕但未提供其它資 editor.push_rejected=該變更被伺服器拒絕但。請檢查 Githook。 editor.push_rejected_summary=完整的拒絕訊息: editor.add_subdir=加入目錄 -editor.unable_to_upload_files=上傳檔案失敗到 '%s', 錯誤訊息: %v +editor.unable_to_upload_files=上傳檔案到「%s」時失敗,錯誤訊息:%v editor.upload_file_is_locked=檔案「%s」已被 %s 鎖定 -editor.upload_files_to_dir=上傳檔案到 '%s' +editor.upload_files_to_dir=上傳檔案到「%s」 editor.cannot_commit_to_protected_branch=無法提交到受保護的分支「%s」。 editor.no_commit_to_branch=無法直接提交到分支因為: editor.user_no_push_to_branch=使用者無法推送到分支 @@ -977,14 +1050,14 @@ projects.create=建立專案 projects.title=標題 projects.new=新增專案 projects.new_subheader=在同一個地方協調、追蹤和更新您的工作,使專案保持透明並按計畫進行。 -projects.create_success=已建立專案 '%s'。 +projects.create_success=已建立專案「%s」。 projects.deletion=刪除專案 projects.deletion_desc=刪除專案會從所有相關的問題移除它。是否繼續? projects.deletion_success=專案已被刪除。 projects.edit=編輯專案 projects.edit_subheader=專案可用來組織問題和追蹤進度。 projects.modify=更新專案 -projects.edit_success=專案 '%s' 已被更新。 +projects.edit_success=已更新專案「%s」。 projects.type.none=無 projects.type.basic_kanban=基本看板 projects.type.bug_triage=Bug 檢傷分類 @@ -999,7 +1072,7 @@ projects.board.new=新增看板 projects.board.set_default=設為預設 projects.board.set_default_desc=將此看板設定為未分類問題及合併請求的預設看板 projects.board.delete=刪除看板 -projects.board.deletion_desc=刪除專案看板會將相關的問題移動到 '未分類'。是否繼續? +projects.board.deletion_desc=刪除專案看板會將所有相關的問題移動到「未分類」,是否繼續? projects.open=開啟 projects.close=關閉 @@ -1047,7 +1120,7 @@ issues.label_templates.title=載入一組預定義的標籤 issues.label_templates.info=沒有任何標籤。點擊「新增標籤」按鈕或使用預定義的標籤集。 issues.label_templates.helper=選擇一個標籤集 issues.label_templates.use=使用標籤集 -issues.label_templates.fail_to_load_file=載入標籤範本檔案 '%s' 失敗: %v +issues.label_templates.fail_to_load_file=載入標籤範本檔「%s」失敗:%v issues.add_label=加入了 %s 標籤 %s issues.add_labels=加入了 %s 標籤 %s issues.remove_label=移除了 %s 標籤 %s @@ -1070,7 +1143,7 @@ issues.delete_branch_at=`刪除分支 %s %s` issues.open_tab=%d 個開放中 issues.close_tab=%d 個已關閉 issues.filter_label=標籤 -issues.filter_label_exclude=`使用 alt + click/enter 來排除標籤`。 +issues.filter_label_exclude=`使用 alt + click/enter 來排除標籤` issues.filter_label_no_select=所有標籤 issues.filter_milestone=里程碑 issues.filter_milestone_no_select=所有里程碑 @@ -1160,15 +1233,15 @@ issues.label_edit=編輯 issues.label_delete=刪除 issues.label_modify=編輯標籤 issues.label_deletion=刪除標籤 -issues.label_deletion_desc=刪除標籤會將其從所有問題中刪除,繼續? +issues.label_deletion_desc=刪除標籤會將其從所有問題中刪除。是否繼續? 
issues.label_deletion_success=標籤已刪除。 issues.label.filter_sort.alphabetically=按字母順序排序 issues.label.filter_sort.reverse_alphabetically=按字母反向排序 issues.label.filter_sort.by_size=檔案由小到大 issues.label.filter_sort.reverse_by_size=檔案由大到小 issues.num_participants=%d 參與者 -issues.attachment.open_tab=`在新分頁中查看 '%s'` -issues.attachment.download=`點擊下載 '%s'` +issues.attachment.open_tab=`在新分頁中查看「%s」` +issues.attachment.download=`點擊下載「%s」` issues.subscribe=訂閱 issues.unsubscribe=取消訂閱 issues.lock=鎖定對話 @@ -1212,7 +1285,7 @@ issues.add_time_sum_to_small=沒有輸入時間。 issues.time_spent_total=總花費時間 issues.time_spent_from_all_authors=`總花費時間:%s` issues.due_date=截止日期 -issues.invalid_due_date_format=截止日期的格式錯誤,必須是 "yyyy-mm-dd" 的形式。 +issues.invalid_due_date_format=截止日期的格式必須為「yyyy-mm-dd」。 issues.error_modifying_due_date=無法修改截止日期。 issues.error_removing_due_date=無法移除截止日期。 issues.push_commit_1=加入了 %d 個提交 %s @@ -1283,6 +1356,9 @@ issues.review.resolved_by=標記了此對話為已解決 issues.assignee.error=因為未預期的錯誤,未能成功指派所有成員。 issues.reference_issue.body=內容 +compare.compare_base=基底分支 +compare.compare_head=比較 + pulls.desc=啟用合併請求和程式碼審核。 pulls.new=建立合併請求 pulls.compare_changes=建立合併請求 @@ -1293,7 +1369,7 @@ pulls.filter_branch=過濾分支 pulls.no_results=未找到結果 pulls.nothing_to_compare=這些分支的內容相同,無需建立合併請求。 pulls.nothing_to_compare_and_allow_empty_pr=這些分支的內容相同,此合併請求將會是空白的。 -pulls.has_pull_request=`已有相同的合併請求: %[2]s#%[3]d` +pulls.has_pull_request=`已有相同的合併請求:%[2]s#%[3]d` pulls.create=建立合併請求 pulls.title_desc=請求將 %[1]d 次程式碼提交從 %[2]s 合併至 %[3]s pulls.merged_title_desc=將 %[1]d 次代碼提交從 %[2]s 合併至 %[3]s %[4]s @@ -1310,7 +1386,10 @@ pulls.manually_merged_as=此合併請求已被手動合併為 標題用 %s 開頭以避免意外地合併此合併請求。` -pulls.cannot_merge_work_in_progress=此合併請求被標記為仍在作業中。準備好要合併時請移除 %s 前綴。 +pulls.cannot_merge_work_in_progress=此合併請求被標記為還在進行中(WIP)。 +pulls.still_in_progress=還在進行中嗎? +pulls.add_prefix=加入 %s 前綴 +pulls.remove_prefix=移除 %s 前綴 pulls.data_broken=此合併請求已損毀,因為遺失 Fork 資訊。 pulls.files_conflicted=此合併請求有變更和目標分支衝突。 pulls.is_checking=正在進行合併衝突檢查,請稍後再試。 @@ -1394,13 +1473,13 @@ milestones.title=標題 milestones.desc=描述 milestones.due_date=截止日期(可選) milestones.clear=清除 -milestones.invalid_due_date_format=截止日期的格式必須為 'yyyy-mm-dd'。 -milestones.create_success=已建立里程碑 '%s'。 +milestones.invalid_due_date_format=截止日期的格式必須為「yyyy-mm-dd」。 +milestones.create_success=已建立里程碑「%s」。 milestones.edit=編輯里程碑 milestones.edit_subheader=里程碑可用來組織問題和追蹤進度。 milestones.cancel=取消 milestones.modify=更新里程碑 -milestones.edit_success=里程碑 '%s' 已更新。 +milestones.edit_success=已更新里程碑「%s」。 milestones.deletion=刪除里程碑 milestones.deletion_desc=刪除里程碑會從所有相關的問題移除它。是否繼續? milestones.deletion_success=里程碑已刪除 @@ -1510,7 +1589,7 @@ activity.git_stats_file_1=%d 個檔案 activity.git_stats_file_n=%d 個檔案 activity.git_stats_files_changed_1=已變更 activity.git_stats_files_changed_n=已變更 -activity.git_stats_additions=: +activity.git_stats_additions=: activity.git_stats_addition_1=新增 %d 行 activity.git_stats_addition_n=新增 %d 行 activity.git_stats_and_deletions=和 @@ -1536,7 +1615,16 @@ settings.hooks=Webhook settings.githooks=Git Hook settings.basic_settings=基本設定 settings.mirror_settings=鏡像設定 -settings.sync_mirror=現在同步 +settings.mirror_settings.docs=設定您的專案自動向其它儲存庫推送、拉取變更,分支、標籤和提交會自動同步。如何鏡像儲存庫? 
+settings.mirror_settings.mirrored_repository=已鏡像的儲存庫 +settings.mirror_settings.direction=方向 +settings.mirror_settings.direction.pull=拉取 +settings.mirror_settings.direction.push=推送 +settings.mirror_settings.last_update=最近更新時間 +settings.mirror_settings.push_mirror.none=未設定推送鏡像 +settings.mirror_settings.push_mirror.remote_url=Git 遠端儲存庫 URL +settings.mirror_settings.push_mirror.add=新增推送鏡像 +settings.sync_mirror=立即同步 settings.mirror_sync_in_progress=鏡像同步正在進行中。 請稍後再回來看看。 settings.email_notifications.enable=啟用郵件通知 settings.email_notifications.onmention=只在被提到時傳送郵件通知 @@ -1544,6 +1632,7 @@ settings.email_notifications.disable=關閉郵件通知 settings.email_notifications.submit=套用郵件偏好設定 settings.site=網站 settings.update_settings=更新設定 +settings.branches.update_default_branch=更新預設分支 settings.advanced_settings=進階設定 settings.wiki_desc=啟用儲存庫 Wiki settings.use_internal_wiki=使用內建 Wiki @@ -1573,6 +1662,7 @@ settings.pulls.allow_rebase_merge_commit=啟用 Rebase 顯式合併提交(--no-f settings.pulls.allow_squash_commits=啟用 Squash 合併提交 settings.pulls.allow_manual_merge=允許將合併請求標記為手動合併 settings.pulls.enable_autodetect_manual_merge=啟用自動偵測手動合併(注意:在某些特殊情況下可能發生誤判) +settings.pulls.default_delete_branch_after_merge=預設在合併後刪除合併請求分支 settings.projects_desc=啟用儲存庫專案 settings.admin_settings=管理員設定 settings.admin_enable_health_check=啟用儲存庫的健康檢查 (git fsck) @@ -1600,6 +1690,7 @@ settings.transfer_form_title=輸入儲存庫名稱以確認: settings.transfer_in_progress=目前正在進行轉移。如果您想要將此儲存庫轉移給其它使用者,請取消它。 settings.transfer_notices_1=- 如果將此儲存庫轉移給個別使用者,您將會失去此儲存庫的存取權。 settings.transfer_notices_2=- 如果將此儲存庫轉移到您(共同)擁有的組織,您將能繼續保有此儲存庫的存取權。 +settings.transfer_notices_3=- 如果此儲存庫為私有儲存庫且將轉移給個別使用者,此動作確保該使用者至少擁有讀取權限(必要時將會修改權限)。 settings.transfer_owner=新擁有者 settings.transfer_perform=進行轉移 settings.transfer_started=此儲存庫已被標記為待轉移且正在等待「%s」的確認 @@ -1723,7 +1814,7 @@ settings.event_pull_request_review_desc=核准、退回或提出審核留言。 settings.event_pull_request_sync=合併請求同步 settings.event_pull_request_sync_desc=合併請求同步。 settings.branch_filter=分支篩選 -settings.branch_filter_desc=推送、建立分支、刪除分支事件的白名單,請使用 glob 比對模式。如果留白或輸入*,所有分支的事件都會被回報。語法參見 github.com/gobwas/glob。範例:master, {master,release*}。 +settings.branch_filter_desc=推送、建立分支、刪除分支事件的白名單,請使用 glob 比對模式。如果留白或輸入*,所有分支的事件都會被回報。語法參見 github.com/gobwas/glob。範例:master, {master,release*}。 settings.active=啟用 settings.active_helper=觸發事件的資訊將會被送到此 Webhook URL。 settings.add_hook_success=Webhook 新增成功! 
@@ -1782,7 +1873,7 @@ settings.protect_merge_whitelist_teams=允許合併的團隊: settings.protect_check_status_contexts=啟用狀態檢查 settings.protect_check_status_contexts_desc=合併前必須先通過狀態檢查。選擇合併前必須通過的檢查。啟用時,必須先將提交推送到另一個分支,通過狀態檢查後再合併或直接推送到符合規則的分支。如果未選擇任何項目,最一個提交必將成功通過狀態檢查。 settings.protect_check_status_contexts_list=此儲存庫一週內曾進行過狀態檢查 -settings.protect_required_approvals=需要的核可數量: +settings.protect_required_approvals=需要的核可數量: settings.protect_required_approvals_desc=只有在獲得足夠數量的核可後才能進行合併。 settings.protect_approvals_whitelist_enabled=使用白名單控管審核人員與團隊 settings.protect_approvals_whitelist_enabled_desc=只有白名單內的使用者與團隊會被計入需要的核可數量。未使用白名單時,將計算任何有寫入權限之人的核可。 @@ -1793,11 +1884,11 @@ settings.dismiss_stale_approvals_desc=當新的提交有修改到合併請求的 settings.require_signed_commits=僅接受經簽署的提交 settings.require_signed_commits_desc=拒絕未經簽署或未經驗證的提交推送到此分支。 settings.protect_protected_file_patterns=受保護的檔案模式(以分號區隔「\;」): -settings.protect_protected_file_patterns_desc=即便您有權限新增、修改和刪除此分支的檔案,仍不允許直接修改受保護的檔案。可以用分號「\;」分隔多個模式。請於 github.com/gobwas/glob 文件查看模式格式。範例:.drone.yml, /docs/**/*.txt。 +settings.protect_protected_file_patterns_desc=即便使用者有權限新增、修改和刪除此分支的檔案,仍不允許直接修改受保護的檔案。可以用半形分號「\;」分隔多個模式。請於github.com/gobwas/glob 文件查看模式格式。範例:.drone.yml, /docs/**/*.txt。 settings.add_protected_branch=啟用保護 settings.delete_protected_branch=停用保護 -settings.update_protect_branch_success='%s' 的分支保護已被更新 -settings.remove_protected_branch_success='%s' 的分支保護已被停用 +settings.update_protect_branch_success=已更新「%s」的分支保護。 +settings.remove_protected_branch_success=已停用「%s」的分支保護。 settings.protected_branch_deletion=停用分支保護 settings.protected_branch_deletion_desc=停用分支保護將允許有寫入權限的使用者推送至該分支,是否繼續? settings.block_rejected_reviews=有退回的審核時阻擋合併 @@ -1812,6 +1903,16 @@ settings.choose_branch=選擇一個分支... settings.no_protected_branch=沒有受保護的分支。 settings.edit_protected_branch=編輯 settings.protected_branch_required_approvals_min=需要的核可數量不能為負數。 +settings.tags=標籤 +settings.tags.protection=標籤保護 +settings.tags.protection.pattern=標籤格式 +settings.tags.protection.allowed=允許的 +settings.tags.protection.allowed.users=允許的使用者 +settings.tags.protection.allowed.teams=允許的團隊 +settings.tags.protection.allowed.noone=無 +settings.tags.protection.create=保護標籤 +settings.tags.protection.none=沒有受保護的標籤。 +settings.tags.protection.pattern.description=您可以使用單一名稱、Glob 模式、正規表示式來配對多個標籤。在受保護的標籤指南閱讀更多內容。 settings.bot_token=Bot Token settings.chat_id=Chat ID settings.matrix.homeserver_url=Homeserver 網址 @@ -1825,6 +1926,7 @@ settings.archive.success=此儲存庫已被封存 settings.archive.error=嘗試封存儲存庫時發生錯誤。查看日誌檔以獲得更多資訊。 settings.archive.error_ismirror=無法封存鏡像儲存庫。 settings.archive.branchsettings_unavailable=已封存的儲存庫無法使用分支設定。 +settings.archive.tagsettings_unavailable=已封存的儲存庫無法使用標籤設定。 settings.unarchive.button=解除封存儲存庫 settings.unarchive.header=解除封存本儲存庫 settings.unarchive.text=取消封存此儲存庫將使它能再次接收提交、推送、新問題與合併請求。 @@ -1862,10 +1964,10 @@ diff.parent=父節點 diff.commit=當前提交 diff.git-notes=備註 diff.data_not_available=沒有內容比較可以使用 -diff.options_button=Diff 選項 +diff.options_button=差異選項 diff.show_diff_stats=顯示統計資料 diff.download_patch=下載 Patch 檔 -diff.download_diff=下載 Diff 檔 +diff.download_diff=下載差異檔 diff.show_split_view=分割檢視 diff.show_unified_view=合併檢視 diff.whitespace_button=空白符號 @@ -1883,8 +1985,9 @@ diff.file_after=之後 diff.file_image_width=寬度 diff.file_image_height=高度 diff.file_byte_size=大小 -diff.file_suppressed=文件差異過大導致無法顯示 -diff.too_many_files=部分文件因文件數量過多而無法顯示 +diff.file_suppressed=檔案差異因為檔案過大而無法顯示 +diff.file_suppressed_line_too_long=檔案差異因為一行或多行太長而無法顯示 +diff.too_many_files=本差異變更的檔案數量過多導致部分檔案未顯示 diff.comment.placeholder=留言... 
diff.comment.markdown_info=支援 markdown 格式。 diff.comment.add_single_comment=加入單獨的留言 @@ -1911,6 +2014,7 @@ release.new_release=發佈新版本 release.draft=草稿 release.prerelease=預發佈版本 release.stable=穩定 +release.compare=比較 release.edit=編輯 release.ahead.commits=%d 次提交 release.ahead.target=在此版本發佈後被加入到 %s @@ -1937,6 +2041,7 @@ release.deletion_tag_desc=即將從儲存庫移除此標籤。儲存庫內容和 release.deletion_tag_success=已刪除此標籤。 release.tag_name_already_exist=已經存在使用相同標籤的發佈版本。 release.tag_name_invalid=標籤名稱無效。 +release.tag_name_protected=標籤名稱已受保護。 release.tag_already_exist=此標籤名稱已存在。 release.downloads=下載附件 release.download_count=下載次數:%s @@ -1945,7 +2050,7 @@ release.add_tag=只建立標籤 branch.name=分支名稱 branch.search=搜尋分支 -branch.already_exists=分支名稱 ”%s“ 已經存在 +branch.already_exists=已存在名為「%s」的分支。 branch.delete_head=刪除 branch.delete=刪除分支「%s」 branch.delete_html=刪除分支 @@ -1954,20 +2059,24 @@ branch.deletion_success=分支「%s」已被刪除。 branch.deletion_failed=刪除分支「%s」失敗。 branch.delete_branch_has_new_commits=因為合併後已加入了新的提交,「%s」分支無法被刪除。 branch.create_branch=建立分支 %s -branch.create_from=從 '%s' -branch.create_success=已建立分支 '%s'。 -branch.branch_already_exists=分支 '%s' 已存在此儲存庫 +branch.create_from=從「%s」 +branch.create_success=已建立分支「%s」。 +branch.branch_already_exists=此儲存庫已有名為「%s」的分支。 branch.branch_name_conflict=分支名稱「%s」與現有分支「%s」衝突。 branch.tag_collision=無法建立「%s」分支,因為此儲存庫中已有同名的標籤。 -branch.deleted_by=刪除人: %s +branch.deleted_by=由 %s 刪除 branch.restore_success=已還原分支「%s」。 branch.restore_failed=還原分支 %s 失敗 branch.protected_deletion_failed=分支「%s」已被保護,不能刪除。 branch.default_deletion_failed=分支「%s」為預設分支,不能刪除。 branch.restore=還原分支「%s」 -branch.download=下載分支 '%s' +branch.download=下載分支「%s」 branch.included_desc=此分支是預設分支的一部分 branch.included=包含 +branch.create_new_branch=從下列分支建立分支: +branch.confirm_create_branch=建立分支 +branch.new_branch=建立新分支 +branch.new_branch_from=從「%s」建立新分支 tag.create_tag=建立標籤 %s tag.create_success=已建立標籤「%s」。 @@ -1975,7 +2084,7 @@ tag.create_success=已建立標籤「%s」。 topic.manage_topics=管理主題 topic.done=完成 topic.count_prompt=您最多能選擇 25 個主題 -topic.format_prompt=主題必須以字母或數字為開頭,可包含連接號 ('-') 且最長為 35 個字。 +topic.format_prompt=主題必須以字母或數字為開頭,可包含連接號「-」且最長為 35 個字元。 error.csv.too_large=無法渲染此檔案,因為它太大了。 error.csv.unexpected=無法渲染此檔案,因為它包含了未預期的字元,於第 %d 行第 %d 列。 @@ -2003,8 +2112,8 @@ team_permission_desc=權限 team_unit_desc=允許存取的儲存庫區域 team_unit_disabled=(已停用) -form.name_reserved=組織名稱 '%s' 是被保留的。 -form.name_pattern_not_allowed=儲存庫名稱無法使用 "%s"。 +form.name_reserved=組織名稱「%s」是被保留的。 +form.name_pattern_not_allowed=儲存庫名稱不可包含字元「%s」。 form.create_org_not_allowed=此帳號禁止建立組織。 settings=組織設定 @@ -2016,9 +2125,9 @@ settings.permission=權限 settings.repoadminchangeteam=儲存庫管理者可增加與移除團隊權限 settings.visibility=瀏覽權限 settings.visibility.public=公開 -settings.visibility.limited=受限(只有登入的使用者才能看到) +settings.visibility.limited=受限(只有登入的使用者才能看到) settings.visibility.limited_shortname=受限 -settings.visibility.private=私有(只有組織成員才能看到) +settings.visibility.private=私有(只有組織成員才能看到) settings.visibility.private_shortname=私有 settings.update_settings=更新設定 @@ -2193,7 +2302,7 @@ users.created=建立時間 users.last_login=上次登入 users.never_login=從未登入 users.send_register_notify=寄送使用者註冊通知 -users.new_success=已建立新帳戶 '%s'。 +users.new_success=已建立新帳戶「%s」。 users.edit=編輯 users.auth_source=認證來源 users.local=本地 @@ -2274,7 +2383,6 @@ auths.host=主機地址 auths.port=連接埠 auths.bind_dn=Bind DN auths.bind_password=Bind 密碼 -auths.bind_password_helper=警告:此密碼以明文存儲。 請儘可能使用唯讀帳戶。 auths.user_base=用戶搜尋基準 auths.user_dn=用戶 DN auths.attribute_username=帳號屬性 @@ -2301,10 +2409,11 @@ auths.smtp_auth=SMTP 驗證類型 auths.smtphost=SMTP 主機地址 auths.smtpport=SMTP 連接埠 auths.allowed_domains=域名白名單 
-auths.allowed_domains_helper=留白以允許所有域名。以逗號 (',') 分隔多個域名。 +auths.allowed_domains_helper=留白以允許所有域名。以半形逗號「,」分隔多個域名。 auths.enable_tls=啟用 TLS 加密 auths.skip_tls_verify=忽略 TLS 驗證 auths.pam_service_name=PAM 服務名稱 +auths.pam_email_domain=PAM 電子信箱域名(非必要) auths.oauth2_provider=OAuth2 提供者 auths.oauth2_icon_url=圖示 URL auths.oauth2_clientID=客戶端 ID (金鑰) @@ -2345,7 +2454,7 @@ auths.tip.yandex=建立新的應用程式,從「Yandex.Passport API」區塊 auths.tip.mastodon=輸入您欲認證的 Mastodon 執行個體的自訂網址(或使用預設值) auths.edit=修改認證來源 auths.activated=該認證來源已啟用 -auths.new_success=已增加認證'%s'。 +auths.new_success=已增加認證「%s」。 auths.update_success=已更新認證來源。 auths.update=更新認證來源 auths.delete=刪除認證來源 @@ -2404,13 +2513,14 @@ config.db_path=資料庫路徑 config.service_config=服務組態 config.register_email_confirm=要求註冊時確認電子郵件 config.disable_register=關閉註冊功能 +config.allow_only_internal_registration=只允許從 Gitea 註冊 config.allow_only_external_registration=只允許從外部服務註冊 config.enable_openid_signup=啟用 OpenID 註冊 config.enable_openid_signin=啟用 OpenID 登入 config.show_registration_button=顯示註冊按鈕 config.require_sign_in_view=需要登入才能瀏覽頁面 config.mail_notify=啟用郵件通知 -config.disable_key_size_check=禁用金鑰最小長度檢查 +config.disable_key_size_check=停用金鑰最小長度檢查 config.enable_captcha=啟用驗證碼 config.active_code_lives=啟用用戶連結有效期 config.reset_password_code_lives=帳戶救援碼有效時間 @@ -2430,7 +2540,7 @@ config.skip_tls_verify=略過 TLS 驗證 config.mailer_config=SMTP 組態 config.mailer_enabled=啟用服務 -config.mailer_disable_helo=禁用 HELO 操作 +config.mailer_disable_helo=停用 HELO 操作 config.mailer_name=發送者名稱 config.mailer_host=郵件主機地址 config.mailer_user=發送者帳號 @@ -2440,8 +2550,8 @@ config.mailer_sendmail_args=Sendmail 參數 config.mailer_sendmail_timeout=Sendmail 逾時 config.test_email_placeholder=電子信箱 (例:test@example.com) config.send_test_mail=傳送測試郵件 -config.test_mail_failed=傳送測試郵件到 '%s' 時失敗:'%v" -config.test_mail_sent=測試郵件已發送到 '%s' +config.test_mail_failed=傳送測試郵件到「%s」時失敗:%v +config.test_mail_sent=測試郵件已傳送到「%s」。 config.oauth_config=OAuth 組態 config.oauth_enabled=啟用服務 @@ -2467,10 +2577,10 @@ config.disable_gravatar=停用 Gravatar config.enable_federated_avatar=啟用 Federated Avatars config.git_config=Git 組態 -config.git_disable_diff_highlight=禁用比較語法高亮 -config.git_max_diff_lines=Max Diff 線 (對於單個檔) -config.git_max_diff_line_characters=最大比較的字元 (單行) -config.git_max_diff_files=Max Diff 檔 (顯示) +config.git_disable_diff_highlight=停用比較語法高亮 +config.git_max_diff_lines=差異比較時顯示的最多行數(單檔) +config.git_max_diff_line_characters=差異比較時顯示的最多字元數(單行) +config.git_max_diff_files=差異比較時顯示的最多檔案數 config.git_gc_args=GC 參數 config.git_migrate_timeout=遷移逾時 config.git_mirror_timeout=鏡像更新超時 diff --git a/package-lock.json b/package-lock.json index a1d1c6080e39..43f1b6f45f89 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,13 +9,11 @@ "@claviska/jquery-minicolors": "2.3.5", "@primer/octicons": "13.0.0", "add-asset-webpack-plugin": "2.0.1", - "clipboard": "2.0.8", "codemirror": "5.61.0", "css-loader": "5.2.4", - "css-minimizer-webpack-plugin": "2.0.0", "dropzone": "5.9.2", - "easymde": "2.14.0", - "esbuild-loader": "2.12.0", + "easymde": "2.15.0", + "esbuild-loader": "2.13.0", "escape-goat": "4.0.0", "fast-glob": "3.2.5", "font-awesome": "4.7.0", @@ -24,22 +22,21 @@ "less": "4.1.1", "less-loader": "8.1.1", "license-checker-webpack-plugin": "0.2.1", - "mermaid": "8.9.2", - "mini-css-extract-plugin": "1.5.0", - "monaco-editor": "0.23.0", - "monaco-editor-webpack-plugin": "3.0.1", - "postcss": "8.2.12", + "mermaid": "8.10.1", + "mini-css-extract-plugin": "1.6.0", + "monaco-editor": "0.24.0", + "monaco-editor-webpack-plugin": "3.1.0", "pretty-ms": "7.0.1", "sortablejs": "1.13.0", - 
"swagger-ui-dist": "3.47.1", + "swagger-ui-dist": "3.48.0", "tributejs": "5.1.3", "vue": "2.6.12", - "vue-bar-graph": "1.2.0", + "vue-bar-graph": "1.2.2", "vue-calendar-heatmap": "0.8.4", - "vue-loader": "15.9.6", + "vue-loader": "15.9.7", "vue-template-compiler": "2.6.12", - "webpack": "5.35.0", - "webpack-cli": "4.6.0", + "webpack": "5.37.0", + "webpack-cli": "4.7.0", "workbox-routing": "6.1.5", "workbox-strategies": "6.1.5", "worker-loader": "3.0.8", @@ -47,17 +44,17 @@ }, "devDependencies": { "editorconfig-checker": "4.0.2", - "eslint": "7.24.0", + "eslint": "7.26.0", "eslint-plugin-html": "6.1.2", "eslint-plugin-import": "2.22.1", - "eslint-plugin-unicorn": "30.0.0", + "eslint-plugin-unicorn": "32.0.1", "eslint-plugin-vue": "7.9.0", "jest": "26.6.3", "jest-extended": "0.11.5", - "stylelint": "13.12.0", - "stylelint-config-standard": "21.0.0", + "stylelint": "13.13.1", + "stylelint-config-standard": "22.0.0", "svgo": "2.3.0", - "updates": "12.0.2" + "updates": "12.1.0" }, "engines": { "node": ">= 12.17.0" @@ -67,31 +64,32 @@ "version": "7.12.11", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dev": true, "dependencies": { "@babel/highlight": "^7.10.4" } }, "node_modules/@babel/compat-data": { - "version": "7.13.15", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.13.15.tgz", - "integrity": "sha512-ltnibHKR1VnrU4ymHyQ/CXtNXI6yZC0oJThyW78Hft8XndANwi+9H+UIklBDraIjFEJzw8wmcM427oDd9KS5wA==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.14.0.tgz", + "integrity": "sha512-vu9V3uMM/1o5Hl5OekMUowo3FqXLJSw+s+66nt0fSWVWTtmosdzn45JHOB3cPtZoe6CTBDzvSw0RdOY85Q37+Q==", "dev": true }, "node_modules/@babel/core": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.13.16.tgz", - "integrity": "sha512-sXHpixBiWWFti0AV2Zq7avpTasr6sIAu7Y396c608541qAU2ui4a193m0KSQmfPSKFZLnQ3cvlKDOm3XkuXm3Q==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.14.2.tgz", + "integrity": "sha512-OgC1mON+l4U4B4wiohJlQNUU3H73mpTyYY3j/c8U9dr9UagGGSm+WFpzjy/YLdoyjiG++c1kIDgxCo/mLwQJeQ==", "dev": true, "dependencies": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.13.16", + "@babel/generator": "^7.14.2", "@babel/helper-compilation-targets": "^7.13.16", - "@babel/helper-module-transforms": "^7.13.14", - "@babel/helpers": "^7.13.16", - "@babel/parser": "^7.13.16", + "@babel/helper-module-transforms": "^7.14.2", + "@babel/helpers": "^7.14.0", + "@babel/parser": "^7.14.2", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.15", - "@babel/types": "^7.13.16", + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -125,23 +123,14 @@ "semver": "bin/semver.js" } }, - "node_modules/@babel/core/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@babel/eslint-parser": { - "version": "7.13.14", - "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.13.14.tgz", - "integrity": "sha512-I0HweR36D73Ibn/FfrRDMKlMqJHFwidIUgYdMpH+aXYuQC+waq59YaJ6t9e9N36axJ82v1jR041wwqDrDXEwRA==", + "version": "7.14.2", + "resolved": 
"https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.14.2.tgz", + "integrity": "sha512-g1YXHASb84MvEkReG/nZ74emTPAMjip1Ey6azZqKTEWidpgEzPGl/uoc6IPJjaMGw424u40sNm1V70tuYOQmeA==", "dev": true, "dependencies": { "eslint-scope": "^5.1.0", - "eslint-visitor-keys": "^1.3.0", + "eslint-visitor-keys": "^2.1.0", "semver": "^6.3.0" }, "engines": { @@ -152,15 +141,6 @@ "eslint": ">=7.5.0" } }, - "node_modules/@babel/eslint-parser/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/eslint-parser/node_modules/semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", @@ -171,25 +151,16 @@ } }, "node_modules/@babel/generator": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.13.16.tgz", - "integrity": "sha512-grBBR75UnKOcUWMp8WoDxNsWCFl//XCK6HWTrBQKTr5SV9f5g0pNOjdyzi/DTBv12S9GnYPInIXQBTky7OXEMg==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.14.2.tgz", + "integrity": "sha512-OnADYbKrffDVai5qcpkMxQ7caomHOoEwjkouqnN2QhydAjowFAZcsdecFIRUBdb+ZcruwYE4ythYmF1UBZU5xQ==", "dev": true, "dependencies": { - "@babel/types": "^7.13.16", + "@babel/types": "^7.14.2", "jsesc": "^2.5.1", "source-map": "^0.5.0" } }, - "node_modules/@babel/generator/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@babel/helper-compilation-targets": { "version": "7.13.16", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.16.tgz", @@ -215,14 +186,14 @@ } }, "node_modules/@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz", + "integrity": "sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ==", "dev": true, "dependencies": { "@babel/helper-get-function-arity": "^7.12.13", "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/types": "^7.14.2" } }, "node_modules/@babel/helper-get-function-arity": { @@ -253,19 +224,19 @@ } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.13.14", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.13.14.tgz", - "integrity": "sha512-QuU/OJ0iAOSIatyVZmfqB0lbkVP0kDRiKj34xy+QNsnVZi/PA6BoSoreeqnxxa9EHFAIL0R9XOaAR/G9WlIy5g==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz", + "integrity": "sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA==", "dev": true, "dependencies": { "@babel/helper-module-imports": "^7.13.12", "@babel/helper-replace-supers": "^7.13.12", "@babel/helper-simple-access": "^7.13.12", 
"@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.13", - "@babel/types": "^7.13.14" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2" } }, "node_modules/@babel/helper-optimise-call-expression": { @@ -314,9 +285,10 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==" + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", + "dev": true }, "node_modules/@babel/helper-validator-option": { "version": "7.12.17", @@ -325,22 +297,23 @@ "dev": true }, "node_modules/@babel/helpers": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.13.17.tgz", - "integrity": "sha512-Eal4Gce4kGijo1/TGJdqp3WuhllaMLSrW6XcL0ulyUAQOuxHcCafZE8KHg9857gcTehsm/v7RcOx2+jp0Ryjsg==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.14.0.tgz", + "integrity": "sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg==", "dev": true, "dependencies": { "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.17", - "@babel/types": "^7.13.17" + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "node_modules/@babel/highlight": { - "version": "7.13.10", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz", - "integrity": "sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", + "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } @@ -349,6 +322,7 @@ "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, "dependencies": { "color-convert": "^1.9.0" }, @@ -360,6 +334,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -373,6 +348,7 @@ "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, "dependencies": { "color-name": "1.1.3" } @@ -380,12 +356,14 @@ "node_modules/@babel/highlight/node_modules/color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "integrity": 
"sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true }, "node_modules/@babel/highlight/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, "engines": { "node": ">=4" } @@ -394,6 +372,7 @@ "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, "dependencies": { "has-flag": "^3.0.0" }, @@ -402,9 +381,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.13.16.tgz", - "integrity": "sha512-6bAg36mCwuqLO0hbR+z7PHuqWiCeP7Dzg73OpQwsAB1Eb8HnGEz5xYBzCfbu+YjoaJsJs+qheDxVAuqbt3ILEw==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.14.2.tgz", + "integrity": "sha512-IoVDIHpsgE/fu7eXBeRWt8zLbDrSvD7H1gpomOkPpBoEN8KCruCqSDdqo8dddwQQrui30KSvQBaMUOJiuFu6QQ==", "dev": true, "bin": { "parser": "bin/babel-parser.js" @@ -558,9 +537,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.13.17.tgz", - "integrity": "sha512-NCdgJEelPTSh+FEFylhnP1ylq848l1z9t9N0j1Lfbcw0+KXGjsTvUmkxy+voLLXB5SOKMbLLx4jxYliGrYQseA==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.14.0.tgz", + "integrity": "sha512-JELkvo/DlpNdJ7dlyw/eY7E0suy5i5GQH+Vlxaq1nsNJ+H7f4Vtv3jMeCEgRhZZQFXTjldYfQgv2qmM6M1v5wA==", "dependencies": { "regenerator-runtime": "^0.13.4" } @@ -586,17 +565,17 @@ } }, "node_modules/@babel/traverse": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.13.17.tgz", - "integrity": "sha512-BMnZn0R+X6ayqm3C3To7o1j7Q020gWdqdyP50KEoVqaCO2c/Im7sYZSmVgvefp8TTMQ+9CtwuBp0Z1CZ8V3Pvg==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.14.2.tgz", + "integrity": "sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA==", "dev": true, "dependencies": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.13.16", - "@babel/helper-function-name": "^7.12.13", + "@babel/generator": "^7.14.2", + "@babel/helper-function-name": "^7.14.2", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.13.16", - "@babel/types": "^7.13.17", + "@babel/parser": "^7.14.2", + "@babel/types": "^7.14.2", "debug": "^4.1.0", "globals": "^11.1.0" } @@ -620,12 +599,12 @@ } }, "node_modules/@babel/types": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.13.17.tgz", - "integrity": "sha512-RawydLgxbOPDlTLJNtoIypwdmAy//uQIzlKt2+iBiJaRlVuI6QLUxVAyWGNfOzp8Yu4L4lLIacoCyTNtpb4wiA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.14.2.tgz", + "integrity": "sha512-SdjAG/3DikRHpUOjxZgnkbR11xUlyDMUFJdvnIgZEE16mqmY0BINMmc4//JMJglEmn6i7sq6p+mGrFWyZ98EEw==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, @@ -673,9 +652,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.0.tgz", - "integrity": "sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog==", + 
"version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.1.tgz", + "integrity": "sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -931,6 +910,15 @@ "node-notifier": "^8.0.0" } }, + "node_modules/@jest/reporters/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/source-map": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-26.6.2.tgz", @@ -945,6 +933,15 @@ "node": ">= 10.14.2" } }, + "node_modules/@jest/source-map/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/test-result": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-26.6.2.tgz", @@ -1002,6 +999,15 @@ "node": ">= 10.14.2" } }, + "node_modules/@jest/transform/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/types": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/@jest/types/-/types-26.6.2.tgz", @@ -1107,6 +1113,7 @@ "version": "0.1.1", "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.1.1.tgz", "integrity": "sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow==", + "dev": true, "engines": { "node": ">=10.13.0" } @@ -1152,6 +1159,14 @@ "@babel/types": "^7.3.0" } }, + "node_modules/@types/codemirror": { + "version": "0.0.109", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.109.tgz", + "integrity": "sha512-cSdiHeeLjvGn649lRTNeYrVCDOgDrtP+bDDSFDd1TF+i0jKGPDRozno2NOJ9lTniso+taiv4kiVS8dgM8Jm5lg==", + "dependencies": { + "@types/tern": "*" + } + }, "node_modules/@types/eslint": { "version": "7.2.10", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.2.10.tgz", @@ -1219,6 +1234,11 @@ "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", "dev": true }, + "node_modules/@types/marked": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@types/marked/-/marked-2.0.2.tgz", + "integrity": "sha512-P4zanhCQKs4tiWPPBGpB7lHflgFCP9DFGNI5YtpW9MALKoy2qs9rHNWJ+z55cegD9uCfnmsKuaosq9FNvbxrOw==" + }, "node_modules/@types/mdast": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", @@ -1235,9 +1255,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "14.14.41", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz", - "integrity": "sha512-dueRKfaJL4RTtSa7bWeTK1M+VH+Gns73oCgzvYfHZywRCoPSd8EkXBL0mZ9unPTveBn+D9phZBaxuzpwjWkW0g==" + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-15.0.3.tgz", + "integrity": "sha512-/WbxFeBU+0F79z9RdEOXH4CsDga+ibi5M8uEYr91u3CkT/pdWcV8MCook+4wDPnZBexRdwWS+PiVZ2xJviAzcQ==" }, 
"node_modules/@types/normalize-package-data": { "version": "2.4.0", @@ -1248,7 +1268,8 @@ "node_modules/@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", + "dev": true }, "node_modules/@types/prettier": { "version": "2.2.3", @@ -1262,6 +1283,14 @@ "integrity": "sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==", "dev": true }, + "node_modules/@types/tern": { + "version": "0.23.3", + "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.3.tgz", + "integrity": "sha512-imDtS4TAoTcXk0g7u4kkWqedB3E4qpjXzCpD2LU5M5NAXHzCDsypyvXSaG7mM8DKYkCRa7tFp4tS/lp/Wo7Q3w==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/unist": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", @@ -1383,6 +1412,14 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/@vue/component-compiler-utils/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@vue/component-compiler-utils/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -1531,18 +1568,18 @@ } }, "node_modules/@webpack-cli/configtest": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.2.tgz", - "integrity": "sha512-3OBzV2fBGZ5TBfdW50cha1lHDVf9vlvRXnjpVbJBa20pSZQaSkMJZiwA8V2vD9ogyeXn8nU5s5A6mHyf5jhMzA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.3.tgz", + "integrity": "sha512-WQs0ep98FXX2XBAfQpRbY0Ma6ADw8JR6xoIkaIiJIzClGOMqVRvPCWqndTxf28DgFopWan0EKtHtg/5W1h0Zkw==", "peerDependencies": { "webpack": "4.x.x || 5.x.x", "webpack-cli": "4.x.x" } }, "node_modules/@webpack-cli/info": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.2.3.tgz", - "integrity": "sha512-lLek3/T7u40lTqzCGpC6CAbY6+vXhdhmwFRxZLMnRm6/sIF/7qMpT8MocXCRQfz0JAh63wpbXLMnsQ5162WS7Q==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.2.4.tgz", + "integrity": "sha512-ogE2T4+pLhTTPS/8MM3IjHn0IYplKM4HbVNMCWA9N4NrdPzunwenpCsqKEXyejMfRu6K8mhauIPYf8ZxWG5O6g==", "dependencies": { "envinfo": "^7.7.3" }, @@ -1551,9 +1588,9 @@ } }, "node_modules/@webpack-cli/serve": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.3.1.tgz", - "integrity": "sha512-0qXvpeYO6vaNoRBI52/UsbcaBydJCggoBBnIo/ovQQdn6fug0BgwsjorV1hVS7fMqGVTZGcVxv8334gjmbj5hw==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.4.0.tgz", + "integrity": "sha512-xgT/HqJ+uLWGX+Mzufusl3cgjAcnqYYskaB7o0vRcwOEfuu6hMzSILQpnIzFMGsTaeaX4Nnekl+6fadLbl1/Vg==", "peerDependencies": { "webpack-cli": "4.x.x" }, @@ -1668,15 +1705,11 @@ "ajv": "^6.9.1" } }, - "node_modules/alphanum-sort": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", - "integrity": 
"sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=" - }, "node_modules/ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, "engines": { "node": ">=6" } @@ -2006,6 +2039,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/autoprefixer/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/autoprefixer/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -2195,7 +2237,8 @@ "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", + "dev": true }, "node_modules/brace-expansion": { "version": "1.1.11", @@ -2224,13 +2267,13 @@ "dev": true }, "node_modules/browserslist": { - "version": "4.16.5", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.5.tgz", - "integrity": "sha512-C2HAjrM1AI/djrpAUU/tr4pml1DqLIzJKSLDBXBrNErl9ZCCTXdhwxdJjYc16953+mBWf7Lw+uUJgpgb8cN71A==", + "version": "4.16.6", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.6.tgz", + "integrity": "sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ==", "dependencies": { - "caniuse-lite": "^1.0.30001214", + "caniuse-lite": "^1.0.30001219", "colorette": "^1.2.2", - "electron-to-chromium": "^1.3.719", + "electron-to-chromium": "^1.3.723", "escalade": "^3.1.1", "node-releases": "^1.1.71" }, @@ -2259,6 +2302,18 @@ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, + "node_modules/builtin-modules": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/cache-base": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", @@ -2296,6 +2351,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, "engines": { "node": ">=6" } @@ -2346,21 +2402,14 @@ "node": ">=6" } }, - "node_modules/caniuse-api": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", - "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", - "dependencies": { - "browserslist": "^4.0.0", - "caniuse-lite": "^1.0.0", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - } - }, "node_modules/caniuse-lite": { - "version": "1.0.30001214", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001214.tgz", - "integrity": 
"sha512-O2/SCpuaU3eASWVaesQirZv1MSjUNOvmugaD8zNSJqw6Vv5SGwoOpA9LJs3pNPfM745nxqPvfZY3MQKY4AKHYg==" + "version": "1.0.30001228", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz", + "integrity": "sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + } }, "node_modules/capture-exit": { "version": "2.0.0", @@ -2384,6 +2433,7 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", + "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -2578,6 +2628,14 @@ "node": ">= 4.0" } }, + "node_modules/clean-css/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/clean-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz", @@ -2590,16 +2648,6 @@ "node": ">=4" } }, - "node_modules/clipboard": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/clipboard/-/clipboard-2.0.8.tgz", - "integrity": "sha512-Y6WO0unAIQp5bLmk1zdThRhgJt/x3ks6f30s3oE3H1mgIEU33XyQjEf8gsf6DxC7NPX8Y1SsNWjUjL/ywLnnbQ==", - "dependencies": { - "good-listener": "^1.2.2", - "select": "^1.1.2", - "tiny-emitter": "^2.0.0" - } - }, "node_modules/cliui": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", @@ -2692,15 +2740,6 @@ "node": ">=0.10.0" } }, - "node_modules/color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/color/-/color-3.1.3.tgz", - "integrity": "sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==", - "dependencies": { - "color-convert": "^1.9.1", - "color-string": "^1.5.4" - } - }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -2717,28 +2756,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/color-string": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.5.tgz", - "integrity": "sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg==", - "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "node_modules/color/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, "node_modules/colorette": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz", @@ -2818,16 +2835,6 @@ "node": ">=0.10.0" } }, - "node_modules/core-js": { - 
"version": "3.11.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.0.tgz", - "integrity": "sha512-bd79DPpx+1Ilh9+30aT5O1sgpQd4Ttg8oqkqi51ZzhedMM1omD2e6IOF48Z/DzDCZ2svp49tN/3vneTK6ZBkXw==", - "hasInstallScript": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", @@ -2838,6 +2845,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.0.tgz", "integrity": "sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA==", + "dev": true, "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", @@ -2849,6 +2857,33 @@ "node": ">=10" } }, + "node_modules/cosmiconfig/node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cosmiconfig/node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -2873,28 +2908,6 @@ "node": "*" } }, - "node_modules/css-color-names": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-1.0.1.tgz", - "integrity": "sha512-/loXYOch1qU1biStIFsHH8SxTmOseh1IJqFvy8IujXOm1h+QjUdDhkzOrR5HG8K8mlxREj0yfi8ewCHx0eMxzA==", - "engines": { - "node": "*" - } - }, - "node_modules/css-declaration-sorter": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.0.0.tgz", - "integrity": "sha512-S0TE4E0ha5+tBHdLWPc5n+S8E4dFBS5xScPvgHkLNZwWvX4ISoFGhGeerLC9uS1cKA/sC+K2wHq6qEbcagT/fg==", - "dependencies": { - "timsort": "^0.3.0" - }, - "engines": { - "node": ">= 10" - }, - "peerDependencies": { - "postcss": "^8.0.9" - } - }, "node_modules/css-loader": { "version": "5.2.4", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-5.2.4.tgz", @@ -2923,42 +2936,11 @@ "webpack": "^4.27.0 || ^5.0.0" } }, - "node_modules/css-minimizer-webpack-plugin": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-2.0.0.tgz", - "integrity": "sha512-cG/uc94727tx5pBNtb1Sd7gvUPzwmcQi1lkpfqTpdkuNq75hJCw7bIVsCNijLm4dhDcr1atvuysl2rZqOG8Txw==", - "dependencies": { - "cssnano": "^5.0.0", - "jest-worker": "^26.3.0", - "p-limit": "^3.0.2", - "postcss": "^8.2.9", - "schema-utils": "^3.0.0", - "serialize-javascript": "^5.0.1", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "clean-css": { - "optional": true - }, - 
"csso": { - "optional": true - } - } - }, "node_modules/css-select": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/css-select/-/css-select-3.1.2.tgz", "integrity": "sha512-qmss1EihSuBNWNNhHjxzxSfJoFBM/lERB/Q4EnsJQQC62R2evJDW481091oAdOr9uh46/0n4nrg0It5cAnj1RA==", + "dev": true, "dependencies": { "boolbase": "^1.0.0", "css-what": "^4.0.0", @@ -2974,6 +2956,7 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", + "dev": true, "dependencies": { "mdn-data": "2.0.14", "source-map": "^0.6.1" @@ -2982,10 +2965,20 @@ "node": ">=8.0.0" } }, + "node_modules/css-tree/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/css-what": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-4.0.0.tgz", "integrity": "sha512-teijzG7kwYfNVsUh2H/YN62xW3KK9YhXEgSlbxMlcyjPNvdKJqFx5lrwlJgoFP1ZHlB89iGDlo/JyshKeRhv5A==", + "dev": true, "engines": { "node": ">= 6" }, @@ -3004,83 +2997,11 @@ "node": ">=4" } }, - "node_modules/cssnano": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.0.1.tgz", - "integrity": "sha512-5WubEmKcK2cqw43DUAayRBiIlTdX7iX3ZowrWDVxSVcW3hyohVnbJ4K4mbnWtJp5rfJnUwHg5H4mDAGzmuCM3g==", - "dependencies": { - "cosmiconfig": "^7.0.0", - "cssnano-preset-default": "^5.0.0", - "is-resolvable": "^1.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/cssnano" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/cssnano-preset-default": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.0.0.tgz", - "integrity": "sha512-zsLppqF7PxY6Tk+ghVx8djf4o1jIOu2GNufqy9lMxldt7gGpSy3FQ6jn7FCd5DZWCaBa7A/1/HVh8CK3BdFSJg==", - "dependencies": { - "css-declaration-sorter": "6.0.0", - "cssnano-utils": "^2.0.0", - "postcss-calc": "^8.0.0", - "postcss-colormin": "^5.0.0", - "postcss-convert-values": "^5.0.0", - "postcss-discard-comments": "^5.0.0", - "postcss-discard-duplicates": "^5.0.0", - "postcss-discard-empty": "^5.0.0", - "postcss-discard-overridden": "^5.0.0", - "postcss-merge-longhand": "^5.0.0", - "postcss-merge-rules": "^5.0.0", - "postcss-minify-font-values": "^5.0.0", - "postcss-minify-gradients": "^5.0.0", - "postcss-minify-params": "^5.0.0", - "postcss-minify-selectors": "^5.0.0", - "postcss-normalize-charset": "^5.0.0", - "postcss-normalize-display-values": "^5.0.0", - "postcss-normalize-positions": "^5.0.0", - "postcss-normalize-repeat-style": "^5.0.0", - "postcss-normalize-string": "^5.0.0", - "postcss-normalize-timing-functions": "^5.0.0", - "postcss-normalize-unicode": "^5.0.0", - "postcss-normalize-url": "^5.0.0", - "postcss-normalize-whitespace": "^5.0.0", - "postcss-ordered-values": "^5.0.0", - "postcss-reduce-initial": "^5.0.0", - "postcss-reduce-transforms": "^5.0.0", - "postcss-svgo": "^5.0.0", - "postcss-unique-selectors": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/cssnano-utils": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-2.0.0.tgz", - "integrity": "sha512-xvxmTszdrvSyTACdPe8VU5J6p4sm3egpgw54dILvNqt5eBUv6TFjACLhSxtRuEsxYrgy8uDy269YjScO5aKbGA==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/csso": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", + "dev": true, "dependencies": { "css-tree": "^1.1.2" }, @@ -3549,11 +3470,6 @@ "node": ">=0.4.0" } }, - "node_modules/delegate": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/delegate/-/delegate-3.2.0.tgz", - "integrity": "sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw==" - }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3584,7 +3500,16 @@ "node": ">=8" } }, - "node_modules/doctrine": { + "node_modules/dir-glob/node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", @@ -3600,6 +3525,7 @@ "version": "1.3.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.3.1.tgz", "integrity": "sha512-Pv2ZluG5ife96udGgEDovOOOA5UELkltfJpnIExPrAk1LTvecolUGn6lIaoLh86d83GiB86CjzciMd9BuRB71Q==", + "dev": true, "dependencies": { "domelementtype": "^2.0.1", "domhandler": "^4.0.0", @@ -3613,6 +3539,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.2.0.tgz", "integrity": "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==", + "dev": true, "funding": [ { "type": "github", @@ -3645,6 +3572,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.2.0.tgz", "integrity": "sha512-zk7sgt970kzPks2Bf+dwT/PLzghLnsivb9CcxkvR8Mzr66Olr0Ofd8neSbglHJHaHa2MadfoSdNlKYAaafmWfA==", + "dev": true, "dependencies": { "domelementtype": "^2.2.0" }, @@ -3659,6 +3587,7 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.6.0.tgz", "integrity": "sha512-y0BezHuy4MDYxh6OvolXYsH+1EMGmFbwv5FKW7ovwMG6zTPWqNPq3WF9ayZssFq+UlKdffGLbOEaghNdaOm1WA==", + "dev": true, "dependencies": { "dom-serializer": "^1.0.1", "domelementtype": "^2.2.0", @@ -3668,30 +3597,21 @@ "url": "https://github.com/fb55/domutils?sponsor=1" } }, - "node_modules/dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", - "dependencies": { - "is-obj": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/dropzone": { "version": "5.9.2", "resolved": "https://registry.npmjs.org/dropzone/-/dropzone-5.9.2.tgz", "integrity": "sha512-5t2z51DzIsWDbTpwcJIvUlwxBbvcwdCApz0yb9ecKJwG155Xm92KMEZmHW1B0MzoXOKvFwdd0nPu5cpeVcvPHQ==" }, "node_modules/easymde": { - "version": "2.14.0", - "resolved": 
"https://registry.npmjs.org/easymde/-/easymde-2.14.0.tgz", - "integrity": "sha512-yQh3EF1amknaxDhXE1L28kwknREU8S19o01ki0t6Q8ThECCipXTOM3E/LL32Ia5D3AsCBRbC1/fT5tpLniVGuw==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/easymde/-/easymde-2.15.0.tgz", + "integrity": "sha512-9jMRIVvKt1d0UjRN45yotUYECAM4xvw0TTAQw8sYDONP++keWJVnd8Xrn+V+vQEN/v9/X0SWEoo1rFSgCooGpw==", "dependencies": { - "codemirror": "^5.59.2", + "@types/codemirror": "0.0.109", + "@types/marked": "^2.0.2", + "codemirror": "^5.61.0", "codemirror-spell-checker": "1.1.2", - "marked": "^2.0.0" + "marked": "^2.0.3" } }, "node_modules/ecc-jsbn": { @@ -3720,9 +3640,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.3.719", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.719.tgz", - "integrity": "sha512-heM78GKSqrIzO9Oz0/y22nTBN7bqSP1Pla2SyU9DiSnQD+Ea9SyyN5RWWlgqsqeBLNDkSlE9J9EHFmdMPzxB/g==" + "version": "1.3.727", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.727.tgz", + "integrity": "sha512-Mfz4FIB4FSvEwBpDfdipRIrwd6uo8gUDoRDF4QEYb4h4tSuI3ov594OrjU6on042UlFHouIJpClDODGkPcBSbg==" }, "node_modules/emittery": { "version": "0.7.2", @@ -3759,9 +3679,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.8.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.0.tgz", - "integrity": "sha512-Sl3KRpJA8OpprrtaIswVki3cWPiPKxXuFxJXBp+zNb6s6VwNWwFRUdtmzd2ReUut8n+sCPx7QCtQ7w5wfJhSgQ==", + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", + "integrity": "sha512-F27oB3WuHDzvR2DOGNTaYy0D5o0cnrv8TeI482VM4kYgQd/FT9lUQwuNsJ0oOHtBUq7eiW5ytqzp7nBFknL+GA==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -3774,6 +3694,7 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, "dependencies": { "ansi-colors": "^4.1.1" }, @@ -3785,6 +3706,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "dev": true, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } @@ -3824,6 +3746,7 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, "dependencies": { "is-arrayish": "^0.2.1" } @@ -3881,20 +3804,20 @@ } }, "node_modules/esbuild": { - "version": "0.10.2", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.10.2.tgz", - "integrity": "sha512-/5vsZD7wTJJHC3yNXLUjXNvUDwqwNoIMvFvLd9tcDQ9el5l13pspYm3yufavjIeYvNtAbo+6N/6uoWx9dGA6ug==", + "version": "0.11.20", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.11.20.tgz", + "integrity": "sha512-QOZrVpN/Yz74xfat0H6euSgn3RnwLevY1mJTEXneukz1ln9qB+ieaerRMzSeETpz/UJWsBMzRVR/andBht5WKw==", "hasInstallScript": true, "bin": { "esbuild": "bin/esbuild" } }, "node_modules/esbuild-loader": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-2.12.0.tgz", - "integrity": "sha512-wQJ8tryOYC5CsG62scwX92HIlY0kgH+28xPz+3pxGZlexkQJYZF0kN97iVR6pyzGzfGhTPD7pabdqVnYF7HMVw==", + "version": "2.13.0", + "resolved": 
"https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-2.13.0.tgz", + "integrity": "sha512-gC9lML8RGkTSWG2pJVEOZRLMoIluq1Jd7OzzVkOZKMzbMDMWDhXEwXLs60n+aglnAYa9GVrD/UXjTHkM51nBsg==", "dependencies": { - "esbuild": "^0.10.2", + "esbuild": "^0.11.19", "joycon": "^3.0.1", "json5": "^2.2.0", "loader-utils": "^2.0.0", @@ -4005,6 +3928,16 @@ "node": ">= 0.8.0" } }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/escodegen/node_modules/type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", @@ -4018,13 +3951,13 @@ } }, "node_modules/eslint": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.24.0.tgz", - "integrity": "sha512-k9gaHeHiFmGCDQ2rEfvULlSLruz6tgfA8DEn+rY9/oYPFFTlz55mM/Q/Rij1b2Y42jwZiK3lXvNTw6w6TXzcKQ==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.26.0.tgz", + "integrity": "sha512-4R1ieRf52/izcZE7AlLy56uIHHDLT74Yzz2Iv2l6kDaYvEu9x+wMB5dZArVL8SYGXSYV2YAg70FcW5Y5nGGNIg==", "dev": true, "dependencies": { "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.0", + "@eslint/eslintrc": "^0.4.1", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -4189,17 +4122,17 @@ "dev": true }, "node_modules/eslint-plugin-unicorn": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-30.0.0.tgz", - "integrity": "sha512-ZKbE48Ep99z/3geLpkBfv+jNrzr2k7bLqCC/RfZOekZzAvn2/ECDE/d8zGdW1YxHmIC9pevQvm8Pl89v9GEIVw==", + "version": "32.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-32.0.1.tgz", + "integrity": "sha512-LaZ9utnXtOJjnoDkpm+nQsONUUmyRR0WD6PGROSdQRRW3LRmgK/ZP8wxjW+Ai+2uolKTtuJzLx2mvbIeIoLqpg==", "dev": true, "dependencies": { "ci-info": "^3.1.1", "clean-regexp": "^1.0.0", "eslint-template-visitor": "^2.3.2", "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", "import-modules": "^2.1.0", + "is-builtin-module": "^3.1.0", "lodash": "^4.17.21", "pluralize": "^8.0.0", "read-pkg-up": "^7.0.1", @@ -4209,7 +4142,7 @@ "semver": "^7.3.5" }, "engines": { - "node": ">=10" + "node": ">=12" }, "funding": { "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" @@ -4218,6 +4151,24 @@ "eslint": ">=7.23.0" } }, + "node_modules/eslint-plugin-unicorn/node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-unicorn/node_modules/read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -4339,9 +4290,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz", - "integrity": 
"sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true, "engines": { "node": ">=10" @@ -5027,6 +4978,15 @@ "node": ">=6" } }, + "node_modules/expect/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/expect/node_modules/stack-utils": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.5.tgz", @@ -5442,9 +5402,9 @@ } }, "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -5591,14 +5551,6 @@ "node": ">=0.6.0" } }, - "node_modules/good-listener": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/good-listener/-/good-listener-1.2.2.tgz", - "integrity": "sha1-1TswzfkxPf+33JoNR3CWqm0UXFA=", - "dependencies": { - "delegate": "^3.1.2" - } - }, "node_modules/graceful-fs": { "version": "4.2.6", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", @@ -5778,27 +5730,12 @@ "he": "bin/he" } }, - "node_modules/hex-color-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/hex-color-regex/-/hex-color-regex-1.1.0.tgz", - "integrity": "sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==" - }, "node_modules/hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, - "node_modules/hsl-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hsl-regex/-/hsl-regex-1.0.0.tgz", - "integrity": "sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=" - }, - "node_modules/hsla-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hsla-regex/-/hsla-regex-1.0.0.tgz", - "integrity": "sha1-wc56MWjIxmFAM6S194d/OyJfnDg=" - }, "node_modules/html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -5949,6 +5886,7 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -6025,11 +5963,6 @@ "node": ">=8" } }, - "node_modules/indexes-of": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", - "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" - }, "node_modules/inflight": { "version": "1.0.6", "resolved": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -6058,14 +5991,6 @@ "node": ">= 0.10" } }, - "node_modules/is-absolute-url": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", - "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==", - "engines": { - "node": ">=8" - } - }, "node_modules/is-accessor-descriptor": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", @@ -6105,24 +6030,25 @@ "node_modules/is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true }, "node_modules/is-bigint": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.1.tgz", - "integrity": "sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-boolean-object": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.0.tgz", - "integrity": "sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", "dev": true, "dependencies": { - "call-bind": "^1.0.0" + "call-bind": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -6154,6 +6080,18 @@ "node": ">=4" } }, + "node_modules/is-builtin-module": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.1.0.tgz", + "integrity": "sha512-OV7JjAgOTfAFJmHZLvpSTb4qi0nIILDV1gWPYDnDJUTNFM5aGlRAhk4QcT8i7TuAleeEV5Fdkqn3t4mS+Q11fg==", + "dev": true, + "dependencies": { + "builtin-modules": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/is-callable": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", @@ -6184,31 +6122,10 @@ "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", "dev": true }, - "node_modules/is-color-stop": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-color-stop/-/is-color-stop-1.1.0.tgz", - "integrity": "sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=", - "dependencies": { - "css-color-names": "^0.0.4", - "hex-color-regex": "^1.1.0", - "hsl-regex": "^1.0.0", - "hsla-regex": "^1.0.0", - "rgb-regex": "^1.0.1", - "rgba-regex": "^1.0.0" - } - }, - "node_modules/is-color-stop/node_modules/css-color-names": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz", - "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=", - "engines": { - "node": "*" - } - }, "node_modules/is-core-module": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": 
"sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.4.0.tgz", + "integrity": "sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==", "dependencies": { "has": "^1.0.3" }, @@ -6229,9 +6146,9 @@ } }, "node_modules/is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.4.tgz", + "integrity": "sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A==", "dev": true, "engines": { "node": ">= 0.4" @@ -6359,9 +6276,9 @@ } }, "node_modules/is-number-object": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz", - "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", "dev": true, "engines": { "node": ">= 0.4" @@ -6370,14 +6287,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", - "engines": { - "node": ">=8" - } - }, "node_modules/is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", @@ -6405,13 +6314,13 @@ "dev": true }, "node_modules/is-regex": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz", - "integrity": "sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -6429,11 +6338,6 @@ "node": ">=6" } }, - "node_modules/is-resolvable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", - "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" - }, "node_modules/is-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", @@ -6443,9 +6347,9 @@ } }, "node_modules/is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true, "engines": { "node": ">= 0.4" @@ -6455,12 +6359,12 @@ } }, 
"node_modules/is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, "dependencies": { - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -6600,6 +6504,15 @@ "node": ">=8" } }, + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/istanbul-reports": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", @@ -7103,6 +7016,24 @@ "node": ">= 10.14.2" } }, + "node_modules/jest-resolve/node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/jest-resolve/node_modules/read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -7389,7 +7320,8 @@ "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true }, "node_modules/js-yaml": { "version": "3.14.1", @@ -7456,9 +7388,9 @@ } }, "node_modules/jsdom/node_modules/acorn": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.1.tgz", - "integrity": "sha512-xYiIVjNuqtKXMxlRMDc6mZUhXehod4a3gbZ1qRlM7icK4EbxUFNLhWoPblCvFtB2Y9CIqHP3CF/rdxLItaQv8g==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.2.4.tgz", + "integrity": "sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -7487,7 +7419,8 @@ "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true }, "node_modules/json-schema": { "version": "0.2.3", @@ -7652,6 +7585,15 @@ "semver": "bin/semver" } }, + "node_modules/less/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -7701,6 +7643,14 @@ "semver": "bin/semver.js" } }, + "node_modules/license-checker-webpack-plugin/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/license-checker-webpack-plugin/node_modules/webpack-sources": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", @@ -7726,7 +7676,8 @@ "node_modules/lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", - "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true }, "node_modules/load-json-file": { "version": "2.0.0", @@ -7743,18 +7694,6 @@ "node": ">=4" } }, - "node_modules/load-json-file/node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "dependencies": { - "error-ex": "^1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/load-json-file/node_modules/strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", @@ -7812,17 +7751,6 @@ "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", "dev": true }, - "node_modules/lodash.flatten": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=", - "dev": true - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=" - }, "node_modules/lodash.template": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", @@ -7846,11 +7774,6 @@ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" - }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", @@ -8028,7 +7951,8 @@ "node_modules/mdn-data": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", - "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==", + "dev": true }, "node_modules/meow": { "version": "9.0.0", @@ -8083,6 +8007,24 @@ "node": ">=10" } }, + "node_modules/meow/node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + 
"error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/meow/node_modules/read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -8180,6 +8122,14 @@ "source-map": "^0.6.1" } }, + "node_modules/merge-source-map/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -8194,9 +8144,9 @@ } }, "node_modules/mermaid": { - "version": "8.9.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-8.9.2.tgz", - "integrity": "sha512-XWEaraDRDlHZexdeHSSr/MH4VJAOksRSPudchi69ecZJ7IUjjlzHsg32n4ZwJUh6lFO+NMYLHwHNNYUyxIjGPg==", + "version": "8.10.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-8.10.1.tgz", + "integrity": "sha512-KxwKEJDKy303TQdz5TQMFb/4u+gUL21CefUMGOfuigDh9powcYaNmuJ5BkHmO0jB3Y1z2zlsuKvHZ2CusWH5+A==", "dependencies": { "@braintree/sanitize-url": "^3.1.0", "d3": "^5.7.0", @@ -8292,9 +8242,9 @@ } }, "node_modules/mini-css-extract-plugin": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-1.5.0.tgz", - "integrity": "sha512-SIbuLMv6jsk1FnLIU5OUG/+VMGUprEjM1+o2trOAx8i5KOKMrhyezb1dJ4Ugsykb8Jgq8/w5NEopy6escV9G7g==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.0.tgz", + "integrity": "sha512-nPFKI7NSy6uONUo9yn2hIfb9vyYvkFu95qki0e21DQ9uaqNKDP15DGpK0KnV6wDroWxPHtExrdEwx/yDQ8nVRw==", "dependencies": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0", @@ -8311,6 +8261,14 @@ "webpack": "^4.4.0 || ^5.0.0" } }, + "node_modules/mini-css-extract-plugin/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/mini-css-extract-plugin/node_modules/webpack-sources": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", @@ -8426,19 +8384,19 @@ "integrity": "sha512-9ARkWHBs+6YJIvrIp0Ik5tyTTtP9PoV0Ssu2Ocq5y9v8+NOOpWiRshAp8c4rZVWTOe+157on/5G+zj5pwIQFEQ==" }, "node_modules/monaco-editor": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.23.0.tgz", - "integrity": "sha512-q+CP5zMR/aFiMTE9QlIavGyGicKnG2v/H8qVvybLzeFsARM8f6G9fL0sMST2tyVYCwDKkGamZUI6647A0jR/Lg==" + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.24.0.tgz", + "integrity": "sha512-o1f0Lz6ABFNTtnEqqqvlY9qzNx24rQZx1RgYNQ8SkWkE+Ka63keHH/RqxQ4QhN4fs/UYOnvAtEUZsPrzccH++A==" }, "node_modules/monaco-editor-webpack-plugin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/monaco-editor-webpack-plugin/-/monaco-editor-webpack-plugin-3.0.1.tgz", - "integrity": "sha512-Hym4HqWgIpyoi9G0spln/b/7rkDKfYwIOrNzo1fHHMc+MLYSwD1JXHwKSDS77X27ZHfVJsEXbMZYdGhSYuVF0w==", + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/monaco-editor-webpack-plugin/-/monaco-editor-webpack-plugin-3.1.0.tgz", + "integrity": "sha512-TP5NkCAV0OeFTry5k/d60KR7CkhTXL4kgJKtE3BzjgbDb5TGEPEhoKmHBrSa6r7Oc0sNbPLZhKD/TP2ig7A+/A==", "dependencies": { "loader-utils": "^2.0.0" }, "peerDependencies": { - "monaco-editor": "0.22.x || 0.23.x", + "monaco-editor": "0.22.x || 0.23.x || 0.24.x", "webpack": "^4.5.0 || 5.x" } }, @@ -8454,9 +8412,9 @@ "dev": true }, "node_modules/nanoid": { - "version": "3.1.22", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.22.tgz", - "integrity": "sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ==", + "version": "3.1.23", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.23.tgz", + "integrity": "sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -8626,14 +8584,6 @@ "integrity": "sha1-0LFF62kRicY6eNIB3E/bEpPvDAM=", "dev": true }, - "node_modules/normalize-url": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.0.tgz", - "integrity": "sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -8649,6 +8599,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", + "dev": true, "dependencies": { "boolbase": "^1.0.0" }, @@ -8777,9 +8728,9 @@ } }, "node_modules/object-inspect": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.2.tgz", - "integrity": "sha512-gz58rdPpadwztRrPjZE9DZLOABUpTGdcANUgOwBFO1C+HZZhePoP83M65WGDmbpwFYJSWqavbl4SgDn4k8RYTA==", + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", + "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8915,14 +8866,14 @@ } }, "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dependencies": { - "yocto-queue": "^0.1.0" + "p-try": "^2.0.0" }, "engines": { - "node": ">=10" + "node": ">=6" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -8939,20 +8890,6 @@ "node": ">=8" } }, - "node_modules/p-locate/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -8973,6 +8910,7 @@ "version": "1.0.1", 
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, "dependencies": { "callsites": "^3.0.0" }, @@ -8999,20 +8937,15 @@ } }, "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" + "error-ex": "^1.2.0" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=0.10.0" } }, "node_modules/parse-ms": { @@ -9076,11 +9009,15 @@ "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "dependencies": { + "pify": "^2.0.0" + }, "engines": { - "node": ">=8" + "node": ">=4" } }, "node_modules/performance-now": { @@ -9229,12 +9166,12 @@ } }, "node_modules/postcss": { - "version": "8.2.12", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.12.tgz", - "integrity": "sha512-BJnGT5+0q2tzvs6oQfnY2NpEJ7rIXNfBnZtQOKCIsweeWXBXeDd5k31UgTdS3d/c02ouspufn37mTaHWkJyzMQ==", + "version": "8.2.15", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.15.tgz", + "integrity": "sha512-2zO3b26eJD/8rb106Qu2o7Qgg52ND5HPjcyQiK2B98O388h43A448LCslC0dI2P97wCAQRJsFvwTRcXxTKds+Q==", "dependencies": { "colorette": "^1.2.2", - "nanoid": "^3.1.22", + "nanoid": "^3.1.23", "source-map": "^0.6.1" }, "engines": { @@ -9245,92 +9182,6 @@ "url": "https://opencollective.com/postcss/" } }, - "node_modules/postcss-calc": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.0.0.tgz", - "integrity": "sha512-5NglwDrcbiy8XXfPM11F3HeC6hoT9W7GUH/Zi5U/p7u3Irv4rHhdDcIZwG0llHXV4ftsBjpfWMXAnXNl4lnt8g==", - "dependencies": { - "postcss-selector-parser": "^6.0.2", - "postcss-value-parser": "^4.0.2" - }, - "peerDependencies": { - "postcss": "^8.2.2" - } - }, - "node_modules/postcss-colormin": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.0.0.tgz", - "integrity": "sha512-Yt84+5V6CgS/AhK7d7MA58vG8dSZ7+ytlRtWLaQhag3HXOncTfmYpuUOX4cDoXjvLfw1sHRCHMiBjYhc35CymQ==", - "dependencies": { - "browserslist": "^4.16.0", - "color": "^3.1.1", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-convert-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.0.0.tgz", - "integrity": "sha512-V5kmYm4xoBAjNs+eHY/6XzXJkkGeg4kwNf2ocfqhLb1WBPEa4oaSmoi1fnVO7Dkblqvus9h+AenDvhCKUCK7uQ==", - "dependencies": { 
- "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-discard-comments": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.0.0.tgz", - "integrity": "sha512-Umig6Gxs8m20RihiXY6QkePd6mp4FxkA1Dg+f/Kd6uw0gEMfKRjDeQOyFkLibexbJJGHpE3lrN/Q0R9SMrUMbQ==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-discard-duplicates": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.0.0.tgz", - "integrity": "sha512-vEJJ+Y3pFUnO1FyCBA6PSisGjHtnphL3V6GsNvkASq/VkP3OX5/No5RYXXLxHa2QegStNzg6HYrYdo71uR4caQ==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-discard-empty": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.0.0.tgz", - "integrity": "sha512-+wigy099Y1xZxG36WG5L1f2zeH1oicntkJEW4TDIqKKDO2g9XVB3OhoiHTu08rDEjLnbcab4rw0BAccwi2VjiQ==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-discard-overridden": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.0.0.tgz", - "integrity": "sha512-hybnScTaZM2iEA6kzVQ6Spozy7kVdLw+lGw8hftLlBEzt93uzXoltkYp9u0tI8xbfhxDLTOOzHsHQCkYdmzRUg==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/postcss-html": { "version": "0.36.0", "resolved": "https://registry.npmjs.org/postcss-html/-/postcss-html-0.36.0.tgz", @@ -9512,6 +9363,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/postcss-less/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postcss-less/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -9530,116 +9390,6 @@ "integrity": "sha1-J7Ocb02U+Bsac7j3Y1HGCeXO8kQ=", "dev": true }, - "node_modules/postcss-merge-longhand": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.0.0.tgz", - "integrity": "sha512-VZNFA40K8BYHzJNA6jHPdg1Nofsz/nK5Dkszrcb5IgWcLroSBZOD6I/iNQzpejSU/3XwpOiZNaYAdBV4KcvxWA==", - "dependencies": { - "css-color-names": "^1.0.1", - "postcss-value-parser": "^4.1.0", - "stylehacks": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-merge-rules": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.0.0.tgz", - "integrity": "sha512-TfsXbKjNYCGfUPEXGIGPySnMiJbdS+3gcVeV8gwmJP4RajyKZHW8E0FYDL1WmggTj3hi+m+WUCAvqRpX2ut4Kg==", - "dependencies": { - "browserslist": "^4.16.0", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^2.0.0", - "postcss-selector-parser": "^6.0.4", - "vendors": "^1.0.3" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - 
"postcss": "^8.2.1" - } - }, - "node_modules/postcss-minify-font-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.0.0.tgz", - "integrity": "sha512-zi2JhFaMOcIaNxhndX5uhsqSY1rexKDp23wV8EOmC9XERqzLbHsoRye3aYF716Zm+hkcR4loqKDt8LZlmihwAg==", - "dependencies": { - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-minify-gradients": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.0.0.tgz", - "integrity": "sha512-/jPtNgs6JySMwgsE5dPOq8a2xEopWTW3RyqoB9fLqxgR+mDUNLSi7joKd+N1z7FXWgVkc4l/dEBMXHgNAaUbvg==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "is-color-stop": "^1.1.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-minify-params": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.0.0.tgz", - "integrity": "sha512-KvZYIxTPBVKjdd+XgObq9A+Sfv8lMkXTpbZTsjhr42XbfWIeLaTItMlygsDWfjArEc3muUfDaUFgNSeDiJ5jug==", - "dependencies": { - "alphanum-sort": "^1.0.2", - "browserslist": "^4.16.0", - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0", - "uniqs": "^2.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-minify-selectors": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.0.0.tgz", - "integrity": "sha512-cEM0O0eWwFIvmo6nfB0lH0vO/XFwgqIvymODbfPXZ1gTA3i76FKnb7TGUrEpiTxaXH6tgYQ6DcTHwRiRS+YQLQ==", - "dependencies": { - "alphanum-sort": "^1.0.2", - "postcss-selector-parser": "^3.1.2" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-minify-selectors/node_modules/postcss-selector-parser": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", - "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", - "dependencies": { - "dot-prop": "^5.2.0", - "indexes-of": "^1.0.1", - "uniq": "^1.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/postcss-modules-extract-imports": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", @@ -9695,180 +9445,6 @@ "postcss": "^8.1.0" } }, - "node_modules/postcss-normalize-charset": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.0.0.tgz", - "integrity": "sha512-pqsCkgo9KmQP0ew6DqSA+uP9YN6EfsW20pQ3JU5JoQge09Z6Too4qU0TNDsTNWuEaP8SWsMp+19l15210MsDZQ==", - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-display-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.0.0.tgz", - "integrity": "sha512-t4f2d//gH1f7Ns0Jq3eNdnWuPT7TeLuISZ6RQx4j8gpl5XrhkdshdNcOnlrEK48YU6Tcb6jqK7dorME3N4oOGA==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - 
"node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-positions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.0.0.tgz", - "integrity": "sha512-0o6/qU5ky74X/eWYj/tv4iiKCm3YqJnrhmVADpIMNXxzFZywsSQxl8F7cKs8jQEtF3VrJBgcDHTexZy1zgDoYg==", - "dependencies": { - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-repeat-style": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.0.0.tgz", - "integrity": "sha512-KRT14JbrXKcFMYuc4q7lh8lvv8u22wLyMrq+UpHKLtbx2H/LOjvWXYdoDxmNrrrJzomAWL+ViEXr48/IhSUJnQ==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-string": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.0.0.tgz", - "integrity": "sha512-wSO4pf7GNcDZpmelREWYADF1+XZWrAcbFLQCOqoE92ZwYgaP/RLumkUTaamEzdT2YKRZAH8eLLKGWotU/7FNPw==", - "dependencies": { - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-timing-functions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.0.0.tgz", - "integrity": "sha512-TwPaDX+wl9wO3MUm23lzGmOzGCGKnpk+rSDgzB2INpakD5dgWR3L6bJq1P1LQYzBAvz8fRIj2NWdnZdV4EV98Q==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-unicode": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.0.0.tgz", - "integrity": "sha512-2CpVoz/67rXU5s9tsPZDxG1YGS9OFHwoY9gsLAzrURrCxTAb0H7Vp87/62LvVPgRWTa5ZmvgmqTp2rL8tlm72A==", - "dependencies": { - "browserslist": "^4.16.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.0.0.tgz", - "integrity": "sha512-ICDaGFBqLgA3dlrCIRuhblLl80D13YtgEV9NJPTYJtgR72vu61KgxAHv+z/lKMs1EbwfSQa3ALjOFLSmXiE34A==", - "dependencies": { - "is-absolute-url": "^3.0.3", - "normalize-url": "^4.5.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-normalize-whitespace": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.0.0.tgz", - "integrity": "sha512-KRnxQvQAVkJfaeXSz7JlnD9nBN9sFZF9lrk9452Q2uRoqrRSkinqifF8Iex7wZGei2DZVG/qpmDFDmRvbNAOGA==", - "dependencies": { - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-ordered-values": { - "version": "5.0.0", - 
"resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.0.0.tgz", - "integrity": "sha512-dPr+SRObiHueCIc4IUaG0aOGQmYkuNu50wQvdXTGKy+rzi2mjmPsbeDsheLk5WPb9Zyf2tp8E+I+h40cnivm6g==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-reduce-initial": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.0.0.tgz", - "integrity": "sha512-wR6pXUaFbSMG1oCKx8pKVA+rnSXCHlca5jMrlmkmif+uig0HNUTV9oGN5kjKsM3mATQAldv2PF9Tbl2vqLFjnA==", - "dependencies": { - "browserslist": "^4.16.0", - "caniuse-api": "^3.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, - "node_modules/postcss-reduce-transforms": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.0.0.tgz", - "integrity": "sha512-iHdGODW4YzM3WjVecBhPQt6fpJC4lGQZxJKjkBNHpp2b8dzmvj0ogKThqya+IRodQEFzjfXgYeESkf172FH5Lw==", - "dependencies": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/postcss-resolve-nested-selector": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz", @@ -9967,6 +9543,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/postcss-safe-parser/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postcss-safe-parser/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -10069,6 +9654,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/postcss-sass/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postcss-sass/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -10173,6 +9767,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/postcss-scss/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postcss-scss/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -10186,9 +9789,9 @@ } }, "node_modules/postcss-selector-parser": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.5.tgz", - "integrity": 
"sha512-aFYPoYmXbZ1V6HZaSvat08M97A8HqO6Pjz+PiNpw/DhuRrC72XWAdp3hL6wusDCN31sSmcZyMGa2hZEuX+Xfhg==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz", + "integrity": "sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" @@ -10197,21 +9800,6 @@ "node": ">=4" } }, - "node_modules/postcss-svgo": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.0.0.tgz", - "integrity": "sha512-M3/VS4sFI1Yp9g0bPL+xzzCNz5iLdRUztoFaugMit5a8sMfkVzzhwqbsOlD8IFFymCdJDmXmh31waYHWw1K4BA==", - "dependencies": { - "postcss-value-parser": "^4.1.0", - "svgo": "^2.3.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/postcss-syntax": { "version": "0.36.2", "resolved": "https://registry.npmjs.org/postcss-syntax/-/postcss-syntax-0.36.2.tgz", @@ -10221,27 +9809,19 @@ "postcss": ">=5.0.0" } }, - "node_modules/postcss-unique-selectors": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.0.0.tgz", - "integrity": "sha512-o9l4pF8SRn7aCMTmzb/kNv/kjV7wPZpZ8Nlb1Gq8v/Qvw969K1wanz1RVA0ehHzWe9+wHXaC2DvZlak/gdMJ5w==", - "dependencies": { - "alphanum-sort": "^1.0.2", - "postcss-selector-parser": "^6.0.2", - "uniqs": "^2.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/postcss-value-parser": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", "integrity": "sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==" }, + "node_modules/postcss/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -10494,18 +10074,6 @@ "node": ">=4" } }, - "node_modules/read-pkg/node_modules/path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true, - "dependencies": { - "pify": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -10830,6 +10398,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, "engines": { "node": ">=4" } @@ -10859,16 +10428,6 @@ "node": ">=0.10.0" } }, - "node_modules/rgb-regex": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/rgb-regex/-/rgb-regex-1.0.1.tgz", - "integrity": "sha1-wODWiC3w4jviVKR16O3UGRX+rrE=" - }, - "node_modules/rgba-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", - "integrity": "sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=" - }, "node_modules/rimraf": { "version": "3.0.2", "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -11273,11 +10832,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/select": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/select/-/select-1.1.2.tgz", - "integrity": "sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0=" - }, "node_modules/semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -11384,19 +10938,6 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, - "node_modules/simple-swizzle/node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - }, "node_modules/sisteransi": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", @@ -11629,15 +11170,6 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, - "node_modules/snapdragon/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/sortablejs": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.13.0.tgz", @@ -11649,9 +11181,10 @@ "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==" }, "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -11678,6 +11211,14 @@ "source-map": "^0.6.0" } }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/source-map-url": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz", @@ -11737,9 +11278,9 @@ "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==" }, "node_modules/spdx-satisfies": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-5.0.0.tgz", - "integrity": "sha512-/hGhwh20BeGmkA+P/lm06RvXD94JduwNxtx/oX3B5ClPt1/u/m5MCaDNo1tV3Y9laLkQr/NRde63b9lLMhlNfw==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-5.0.1.tgz", + "integrity": "sha512-Nwor6W6gzFp8XX4neaKQ7ChV4wmpSh2sSDemMFSzHxpTw460jxFYeOn+jq4ybnSSw/5sc3pjka9MQPouksQNpw==", "dependencies": { "spdx-compare": "^1.0.0", "spdx-expression-parse": 
"^3.0.0", @@ -11801,7 +11342,8 @@ "node_modules/stable": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", - "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==", + "dev": true }, "node_modules/stack-utils": { "version": "2.0.3", @@ -12083,32 +11625,17 @@ "integrity": "sha1-eVjHk+R+MuB9K1yv5cC/jhLneQI=", "dev": true }, - "node_modules/stylehacks": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.0.0.tgz", - "integrity": "sha512-QOWm6XivDLb+fqffTZP8jrmPmPITVChl2KCY2R05nsCWwLi3VGhCdVc3IVGNwd1zzTt1jPd67zIKjpQfxzQZeA==", - "dependencies": { - "browserslist": "^4.16.0", - "postcss-selector-parser": "^6.0.4" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.1" - } - }, "node_modules/stylelint": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-13.12.0.tgz", - "integrity": "sha512-P8O1xDy41B7O7iXaSlW+UuFbE5+ZWQDb61ndGDxKIt36fMH50DtlQTbwLpFLf8DikceTAb3r6nPrRv30wBlzXw==", + "version": "13.13.1", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-13.13.1.tgz", + "integrity": "sha512-Mv+BQr5XTUrKqAXmpqm6Ddli6Ief+AiPZkRsIrAoUKFuq/ElkUh9ZMYxXD0iQNZ5ADghZKLOWz1h7hTClB7zgQ==", "dev": true, "dependencies": { "@stylelint/postcss-css-in-js": "^0.37.2", "@stylelint/postcss-markdown": "^0.36.2", "autoprefixer": "^9.8.6", - "balanced-match": "^1.0.0", - "chalk": "^4.1.0", + "balanced-match": "^2.0.0", + "chalk": "^4.1.1", "cosmiconfig": "^7.0.0", "debug": "^4.3.1", "execall": "^2.0.0", @@ -12117,7 +11644,7 @@ "file-entry-cache": "^6.0.1", "get-stdin": "^8.0.0", "global-modules": "^2.0.0", - "globby": "^11.0.2", + "globby": "^11.0.3", "globjoin": "^0.1.4", "html-tags": "^3.1.0", "ignore": "^5.1.8", @@ -12125,10 +11652,10 @@ "imurmurhash": "^0.1.4", "known-css-properties": "^0.21.0", "lodash": "^4.17.21", - "log-symbols": "^4.0.0", + "log-symbols": "^4.1.0", "mathml-tag-names": "^2.1.3", "meow": "^9.0.0", - "micromatch": "^4.0.2", + "micromatch": "^4.0.4", "normalize-selector": "^0.2.0", "postcss": "^7.0.35", "postcss-html": "^0.36.0", @@ -12138,7 +11665,7 @@ "postcss-safe-parser": "^4.0.2", "postcss-sass": "^0.4.4", "postcss-scss": "^2.1.1", - "postcss-selector-parser": "^6.0.4", + "postcss-selector-parser": "^6.0.5", "postcss-syntax": "^0.36.2", "postcss-value-parser": "^4.1.0", "resolve-from": "^5.0.0", @@ -12149,8 +11676,8 @@ "style-search": "^0.1.0", "sugarss": "^2.0.0", "svg-tags": "^1.0.0", - "table": "^6.0.7", - "v8-compile-cache": "^2.2.0", + "table": "^6.6.0", + "v8-compile-cache": "^2.3.0", "write-file-atomic": "^3.0.3" }, "bin": { @@ -12165,24 +11692,24 @@ } }, "node_modules/stylelint-config-recommended": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-4.0.0.tgz", - "integrity": "sha512-sgna89Ng+25Hr9kmmaIxpGWt2LStVm1xf1807PdcWasiPDaOTkOHRL61sINw0twky7QMzafCGToGDnHT/kTHtQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-5.0.0.tgz", + "integrity": "sha512-c8aubuARSu5A3vEHLBeOSJt1udOdS+1iue7BmJDTSXoCBmfEQmmWX+59vYIj3NQdJBY6a/QRv1ozVFpaB9jaqA==", "dev": true, "peerDependencies": { - "stylelint": "^13.12.0" + "stylelint": "^13.13.0" } }, "node_modules/stylelint-config-standard": { - "version": 
"21.0.0", - "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-21.0.0.tgz", - "integrity": "sha512-Yf6mx5oYEbQQJxWuW7X3t1gcxqbUx52qC9SMS3saC2ruOVYEyqmr5zSW6k3wXflDjjFrPhar3kp68ugRopmlzg==", + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-22.0.0.tgz", + "integrity": "sha512-uQVNi87SHjqTm8+4NIP5NMAyY/arXrBgimaaT7skvRfE9u3JKXRK9KBkbr4pVmeciuCcs64kAdjlxfq6Rur7Hw==", "dev": true, "dependencies": { - "stylelint-config-recommended": "^4.0.0" + "stylelint-config-recommended": "^5.0.0" }, "peerDependencies": { - "stylelint": "^13.12.0" + "stylelint": "^13.13.0" } }, "node_modules/stylelint/node_modules/ansi-styles": { @@ -12197,6 +11724,12 @@ "node": ">=4" } }, + "node_modules/stylelint/node_modules/balanced-match": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-2.0.0.tgz", + "integrity": "sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==", + "dev": true + }, "node_modules/stylelint/node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -12283,6 +11816,15 @@ "node": ">=8" } }, + "node_modules/stylelint/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/stylelint/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -12389,6 +11931,15 @@ "url": "https://opencollective.com/postcss/" } }, + "node_modules/sugarss/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/sugarss/node_modules/supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -12440,6 +11991,7 @@ "version": "2.3.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.3.0.tgz", "integrity": "sha512-fz4IKjNO6HDPgIQxu4IxwtubtbSfGEAJUq/IXyTPIkGhWck/faiiwfkvsB8LnBkKLvSoyNNIY6d13lZprJMc9Q==", + "dev": true, "dependencies": { "@trysound/sax": "0.1.1", "chalk": "^4.1.0", @@ -12460,14 +12012,15 @@ "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, "engines": { "node": ">= 10" } }, "node_modules/swagger-ui-dist": { - "version": "3.47.1", - "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-3.47.1.tgz", - "integrity": "sha512-7b9iHDC/GGC9SJLd3HiV/3EnsJ3wu7xN8Q4MpOPfQO8UG7TQFG2TMTDkvvy0SNeqxQY0tGQY0ppZC9a95tW3kg==" + "version": "3.48.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-3.48.0.tgz", + "integrity": "sha512-UgpKIQW5RAb4nYRG8B615blmQzct0DNuvtX4904Fe2aMWAVfWeKHKl4kwzFXuBJgr2WYWTwM1PnhZ+qqkLrpPg==" }, "node_modules/symbol-tree": { "version": "3.2.4", @@ -12476,29 +12029,26 @@ "dev": true }, "node_modules/table": { - "version": "6.3.2", - "resolved": 
"https://registry.npmjs.org/table/-/table-6.3.2.tgz", - "integrity": "sha512-I9/Ca6Huf2oxFag7crD0DhA+arIdfLtWunSn0NIXSzjtUlDgIBGVZY7SsMkNPNT3Psd/z4gza0nuEpmra9eRbg==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.0.tgz", + "integrity": "sha512-SAM+5p6V99gYiiy2gT5ArdzgM1dLDed0nkrWmG6Fry/bUS/m9x83BwpJUOf1Qj/x2qJd+thL6IkIx7qPGRxqBw==", "dev": true, "dependencies": { "ajv": "^8.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", "lodash.clonedeep": "^4.5.0", - "lodash.flatten": "^4.4.0", "lodash.truncate": "^4.4.2", "slice-ansi": "^4.0.0", - "string-width": "^4.2.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" }, "engines": { "node": ">=10.0.0" } }, "node_modules/table/node_modules/ajv": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.1.0.tgz", - "integrity": "sha512-B/Sk2Ix7A36fs/ZkuGLIR86EdjbgR6fsAcbx9lOP/QBSXujDNbVmIS/U4Itz5k8fPFDeVZl/zQ/gJW4Jrq6XjQ==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.3.0.tgz", + "integrity": "sha512-RYE7B5An83d7eWnDR8kbdaIFqmKCNsP16ay1hDbJEU+sa0e3H9SebskCt0Uufem6cfAVu7Col6ubcn/W+Sm8/Q==", "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", @@ -12575,16 +12125,16 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.1.1.tgz", - "integrity": "sha512-5XNNXZiR8YO6X6KhSGXfY0QrGrCRlSwAEjIIrlRQR4W8nP69TaJUlh3bkuac6zzgspiGPfKEHcY295MMVExl5Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.1.2.tgz", + "integrity": "sha512-6QhDaAiVHIQr5Ab3XUWZyDmrIPCHMiqJVljMF91YKyqwKkL5QHnYMkrMBy96v9Z7ev1hGhSEw1HQZc2p/s5Z8Q==", "dependencies": { "jest-worker": "^26.6.2", "p-limit": "^3.1.0", "schema-utils": "^3.0.0", "serialize-javascript": "^5.0.1", "source-map": "^0.6.1", - "terser": "^5.5.1" + "terser": "^5.7.0" }, "engines": { "node": ">= 10.13.0" @@ -12597,10 +12147,32 @@ "webpack": "^5.1.0" } }, + "node_modules/terser-webpack-plugin/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/terser-webpack-plugin/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/terser-webpack-plugin/node_modules/terser": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.6.1.tgz", - "integrity": "sha512-yv9YLFQQ+3ZqgWCUk+pvNJwgUTdlIxUk1WTN+RnaFJe2L7ipG2csPT0ra2XRm7Cs8cxN7QXmK1rFzEwYEQkzXw==", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.0.tgz", + "integrity": "sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==", "dependencies": { "commander": "^2.20.0", "source-map": "~0.7.2", @@ -12621,6 +12193,14 @@ "node": ">= 8" } }, + "node_modules/terser/node_modules/source-map": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -12647,16 +12227,6 @@ "integrity": "sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==", "dev": true }, - "node_modules/timsort": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", - "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=" - }, - "node_modules/tiny-emitter": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz", - "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==" - }, "node_modules/tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -12878,9 +12448,9 @@ } }, "node_modules/type-fest": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.0.2.tgz", - "integrity": "sha512-a720oz3Kjbp3ll0zkeN9qjRhO7I34MKMhPGQiQJAmaZQZQ1lo+NWThK322f7sXV+kTg9B1Ybt16KgBXWgteT8w==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.1.1.tgz", + "integrity": "sha512-RPDKc5KrIyKTP7Fk75LruUagqG6b+OTgXlCR2Z0aQDJFeIvL4/mhahSEtHmmVzXu4gmA0srkF/8FCH3WOWxTWA==", "engines": { "node": ">=10" }, @@ -12903,9 +12473,9 @@ "integrity": "sha512-dELuLBVa2jvWdU/CHTKi2L/POYaRupv942k+vRsFXsM17acXesQGAiGCio82RW7fvcr7bkuD/Zj8XpUh6aPC2A==" }, "node_modules/uglify-js": { - "version": "3.13.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.4.tgz", - "integrity": "sha512-kv7fCkIXyQIilD5/yQy8O+uagsYIOt5cZvs890W40/e/rvjMSzJw81o9Bg0tkURxzZBROtDQhW2LFjOGoK3RZw==", + "version": "3.13.6", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.6.tgz", + "integrity": "sha512-rRprLwl8RVaS+Qvx3Wh5hPfPBn9++G6xkGlUupya0s5aDmNjI7z3lnRLB3u7sN4OmbB0pWgzhM9BEJyiWAwtAA==", "bin": { "uglifyjs": "bin/uglifyjs" }, @@ -12979,16 +12549,6 @@ "node": ">=0.10.0" } }, - "node_modules/uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" - }, - "node_modules/uniqs": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz", - "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=" - }, "node_modules/unist-util-find-all-after": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/unist-util-find-all-after/-/unist-util-find-all-after-3.0.2.tgz", @@ -13083,9 +12643,9 @@ } }, "node_modules/updates": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/updates/-/updates-12.0.2.tgz", - "integrity": "sha512-Y5Gnb5MCW9tzjee5on+t4rLsM0gT13ZZ/Iy4wQFO4DdLh8p/XBQuStd4TSUB64kMfYXUIN+vb7wHs7w16KiNXw==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/updates/-/updates-12.1.0.tgz", + "integrity": "sha512-cC/jeGLoeMiu0NteTQsFZTQ9p1aLYs9uODV3HbS3Zx7fAk+dY0GsrUCC8C153szTH3X9NkPtYp0FpLLS2qIKMw==", "dev": true, "bin": { "updates": "updates.cjs" @@ -13155,9 +12715,9 @@ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" }, "node_modules/v8-to-istanbul": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.1.1.tgz", - 
"integrity": "sha512-p0BB09E5FRjx0ELN6RgusIPsSPhtgexSRcKETybEs6IGOTXJSZqfwxp7r//55nnu0f1AxltY5VvdVqy2vZf9AA==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz", + "integrity": "sha512-TxNb7YEUwkLXCQYeudi6lgQ/SZrzNO4kMdlqVxaZPUIUjCv6iSSypUQX70kNBSERpQ8fk48+d61FXk+tgqcWow==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "^2.0.1", @@ -13187,15 +12747,6 @@ "spdx-expression-parse": "^3.0.0" } }, - "node_modules/vendors": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.4.tgz", - "integrity": "sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, "node_modules/verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", @@ -13246,11 +12797,10 @@ "integrity": "sha512-uhmLFETqPPNyuLLbsKz6ioJ4q7AZHzD8ZVFNATNyICSZouqP2Sz0rotWQC8UNBF6VGSCs5abnKJoStA6JbCbfg==" }, "node_modules/vue-bar-graph": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vue-bar-graph/-/vue-bar-graph-1.2.0.tgz", - "integrity": "sha512-vqe2KZPlm0JpKMxDlgTwGDYnLvN32dSLrGm4EC2ivuiZ2FJ8T8/mqi67XEeM0zkFKi9jmp7U09zKCYkg1ag+WQ==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/vue-bar-graph/-/vue-bar-graph-1.2.2.tgz", + "integrity": "sha512-XasqdeZeFAio2CgK04G3cHqY14Ogh/QibPX/uX1umkxLmguIKpSnU1cy8R8XQwmbIrS4c2v6Kp5c/A+QrBUfhg==", "dependencies": { - "core-js": "^3.6.1", "gsap": "^3.0.4", "vue": "^2.6.11" } @@ -13315,9 +12865,9 @@ "integrity": "sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==" }, "node_modules/vue-loader": { - "version": "15.9.6", - "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.9.6.tgz", - "integrity": "sha512-j0cqiLzwbeImIC6nVIby2o/ABAWhlppyL/m5oJ67R5MloP0hj/DtFgb0Zmq3J9CG7AJ+AXIvHVnJAPBvrLyuDg==", + "version": "15.9.7", + "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.9.7.tgz", + "integrity": "sha512-qzlsbLV1HKEMf19IqCJqdNvFJRCI58WNbS6XbPqK13MrLz65es75w392MSQ5TsARAfIjUw+ATm3vlCXUJSOH9Q==", "dependencies": { "@vue/component-compiler-utils": "^3.1.0", "hash-sum": "^1.0.2", @@ -13472,16 +13022,16 @@ } }, "node_modules/webpack": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.35.0.tgz", - "integrity": "sha512-au3gu55yYF/h6NXFr0KZPZAYxS6Nlc595BzYPke8n0CSff5WXcoixtjh5LC/8mXunkRKxhymhXmBY0+kEbR6jg==", + "version": "5.37.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.37.0.tgz", + "integrity": "sha512-yvdhgcI6QkQkDe1hINBAJ1UNevqNGTVaCkD2SSJcB8rcrNNl922RI8i2DXUAuNfANoxwsiXXEA4ZPZI9q2oGLA==", "dependencies": { "@types/eslint-scope": "^3.7.0", "@types/estree": "^0.0.47", "@webassemblyjs/ast": "1.11.0", "@webassemblyjs/wasm-edit": "1.11.0", "@webassemblyjs/wasm-parser": "1.11.0", - "acorn": "^8.0.4", + "acorn": "^8.2.1", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.8.0", @@ -13517,17 +13067,16 @@ } }, "node_modules/webpack-cli": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.6.0.tgz", - "integrity": "sha512-9YV+qTcGMjQFiY7Nb1kmnupvb1x40lfpj8pwdO/bom+sQiP4OBMKjHq29YQrlDWDPZO9r/qWaRRywKaRDKqBTA==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.7.0.tgz", + "integrity": 
"sha512-7bKr9182/sGfjFm+xdZSwgQuFjgEcy0iCTIBxRUeteJ2Kr8/Wz0qNJX+jw60LU36jApt4nmMkep6+W5AKhok6g==", "dependencies": { "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^1.0.2", - "@webpack-cli/info": "^1.2.3", - "@webpack-cli/serve": "^1.3.1", + "@webpack-cli/configtest": "^1.0.3", + "@webpack-cli/info": "^1.2.4", + "@webpack-cli/serve": "^1.4.0", "colorette": "^1.2.1", "commander": "^7.0.0", - "enquirer": "^2.3.6", "execa": "^5.0.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", @@ -13633,10 +13182,18 @@ "node": ">=10.13.0" } }, + "node_modules/webpack-sources/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack/node_modules/acorn": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.1.tgz", - "integrity": "sha512-xYiIVjNuqtKXMxlRMDc6mZUhXehod4a3gbZ1qRlM7icK4EbxUFNLhWoPblCvFtB2Y9CIqHP3CF/rdxLItaQv8g==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.2.4.tgz", + "integrity": "sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg==", "bin": { "acorn": "bin/acorn" }, @@ -13912,6 +13469,7 @@ "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, "engines": { "node": ">= 6" } @@ -13996,31 +13554,32 @@ "version": "7.12.11", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dev": true, "requires": { "@babel/highlight": "^7.10.4" } }, "@babel/compat-data": { - "version": "7.13.15", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.13.15.tgz", - "integrity": "sha512-ltnibHKR1VnrU4ymHyQ/CXtNXI6yZC0oJThyW78Hft8XndANwi+9H+UIklBDraIjFEJzw8wmcM427oDd9KS5wA==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.14.0.tgz", + "integrity": "sha512-vu9V3uMM/1o5Hl5OekMUowo3FqXLJSw+s+66nt0fSWVWTtmosdzn45JHOB3cPtZoe6CTBDzvSw0RdOY85Q37+Q==", "dev": true }, "@babel/core": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.13.16.tgz", - "integrity": "sha512-sXHpixBiWWFti0AV2Zq7avpTasr6sIAu7Y396c608541qAU2ui4a193m0KSQmfPSKFZLnQ3cvlKDOm3XkuXm3Q==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.14.2.tgz", + "integrity": "sha512-OgC1mON+l4U4B4wiohJlQNUU3H73mpTyYY3j/c8U9dr9UagGGSm+WFpzjy/YLdoyjiG++c1kIDgxCo/mLwQJeQ==", "dev": true, "requires": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.13.16", + "@babel/generator": "^7.14.2", "@babel/helper-compilation-targets": "^7.13.16", - "@babel/helper-module-transforms": "^7.13.14", - "@babel/helpers": "^7.13.16", - "@babel/parser": "^7.13.16", + "@babel/helper-module-transforms": "^7.14.2", + "@babel/helpers": "^7.14.0", + "@babel/parser": "^7.14.2", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.15", - "@babel/types": "^7.13.16", + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -14043,32 +13602,20 @@ "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true - }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true } } }, "@babel/eslint-parser": { - "version": "7.13.14", - "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.13.14.tgz", - "integrity": "sha512-I0HweR36D73Ibn/FfrRDMKlMqJHFwidIUgYdMpH+aXYuQC+waq59YaJ6t9e9N36axJ82v1jR041wwqDrDXEwRA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.14.2.tgz", + "integrity": "sha512-g1YXHASb84MvEkReG/nZ74emTPAMjip1Ey6azZqKTEWidpgEzPGl/uoc6IPJjaMGw424u40sNm1V70tuYOQmeA==", "dev": true, "requires": { "eslint-scope": "^5.1.0", - "eslint-visitor-keys": "^1.3.0", + "eslint-visitor-keys": "^2.1.0", "semver": "^6.3.0" }, "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true - }, "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", @@ -14078,22 +13625,14 @@ } }, "@babel/generator": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.13.16.tgz", - "integrity": "sha512-grBBR75UnKOcUWMp8WoDxNsWCFl//XCK6HWTrBQKTr5SV9f5g0pNOjdyzi/DTBv12S9GnYPInIXQBTky7OXEMg==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.14.2.tgz", + "integrity": "sha512-OnADYbKrffDVai5qcpkMxQ7caomHOoEwjkouqnN2QhydAjowFAZcsdecFIRUBdb+ZcruwYE4ythYmF1UBZU5xQ==", "dev": true, "requires": { - "@babel/types": "^7.13.16", + "@babel/types": "^7.14.2", "jsesc": "^2.5.1", "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - } } }, "@babel/helper-compilation-targets": { @@ -14117,14 +13656,14 @@ } }, "@babel/helper-function-name": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", - "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz", + "integrity": "sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ==", "dev": true, "requires": { "@babel/helper-get-function-arity": "^7.12.13", "@babel/template": "^7.12.13", - "@babel/types": "^7.12.13" + "@babel/types": "^7.14.2" } }, "@babel/helper-get-function-arity": { @@ -14155,19 +13694,19 @@ } }, "@babel/helper-module-transforms": { - "version": "7.13.14", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.13.14.tgz", - "integrity": "sha512-QuU/OJ0iAOSIatyVZmfqB0lbkVP0kDRiKj34xy+QNsnVZi/PA6BoSoreeqnxxa9EHFAIL0R9XOaAR/G9WlIy5g==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz", + "integrity": 
"sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA==", "dev": true, "requires": { "@babel/helper-module-imports": "^7.13.12", "@babel/helper-replace-supers": "^7.13.12", "@babel/helper-simple-access": "^7.13.12", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.13", - "@babel/types": "^7.13.14" + "@babel/traverse": "^7.14.2", + "@babel/types": "^7.14.2" } }, "@babel/helper-optimise-call-expression": { @@ -14216,9 +13755,10 @@ } }, "@babel/helper-validator-identifier": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", - "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==" + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", + "dev": true }, "@babel/helper-validator-option": { "version": "7.12.17", @@ -14227,22 +13767,23 @@ "dev": true }, "@babel/helpers": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.13.17.tgz", - "integrity": "sha512-Eal4Gce4kGijo1/TGJdqp3WuhllaMLSrW6XcL0ulyUAQOuxHcCafZE8KHg9857gcTehsm/v7RcOx2+jp0Ryjsg==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.14.0.tgz", + "integrity": "sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg==", "dev": true, "requires": { "@babel/template": "^7.12.13", - "@babel/traverse": "^7.13.17", - "@babel/types": "^7.13.17" + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.14.0" } }, "@babel/highlight": { - "version": "7.13.10", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz", - "integrity": "sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", + "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -14251,6 +13792,7 @@ "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, "requires": { "color-convert": "^1.9.0" } @@ -14259,6 +13801,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -14269,6 +13812,7 @@ "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, "requires": { "color-name": "1.1.3" } @@ -14276,17 +13820,20 @@ "color-name": { "version": "1.1.3", 
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, "requires": { "has-flag": "^3.0.0" } @@ -14294,9 +13841,9 @@ } }, "@babel/parser": { - "version": "7.13.16", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.13.16.tgz", - "integrity": "sha512-6bAg36mCwuqLO0hbR+z7PHuqWiCeP7Dzg73OpQwsAB1Eb8HnGEz5xYBzCfbu+YjoaJsJs+qheDxVAuqbt3ILEw==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.14.2.tgz", + "integrity": "sha512-IoVDIHpsgE/fu7eXBeRWt8zLbDrSvD7H1gpomOkPpBoEN8KCruCqSDdqo8dddwQQrui30KSvQBaMUOJiuFu6QQ==", "dev": true }, "@babel/plugin-syntax-async-generators": { @@ -14408,9 +13955,9 @@ } }, "@babel/runtime": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.13.17.tgz", - "integrity": "sha512-NCdgJEelPTSh+FEFylhnP1ylq848l1z9t9N0j1Lfbcw0+KXGjsTvUmkxy+voLLXB5SOKMbLLx4jxYliGrYQseA==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.14.0.tgz", + "integrity": "sha512-JELkvo/DlpNdJ7dlyw/eY7E0suy5i5GQH+Vlxaq1nsNJ+H7f4Vtv3jMeCEgRhZZQFXTjldYfQgv2qmM6M1v5wA==", "requires": { "regenerator-runtime": "^0.13.4" } @@ -14438,17 +13985,17 @@ } }, "@babel/traverse": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.13.17.tgz", - "integrity": "sha512-BMnZn0R+X6ayqm3C3To7o1j7Q020gWdqdyP50KEoVqaCO2c/Im7sYZSmVgvefp8TTMQ+9CtwuBp0Z1CZ8V3Pvg==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.14.2.tgz", + "integrity": "sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA==", "dev": true, "requires": { "@babel/code-frame": "^7.12.13", - "@babel/generator": "^7.13.16", - "@babel/helper-function-name": "^7.12.13", + "@babel/generator": "^7.14.2", + "@babel/helper-function-name": "^7.14.2", "@babel/helper-split-export-declaration": "^7.12.13", - "@babel/parser": "^7.13.16", - "@babel/types": "^7.13.17", + "@babel/parser": "^7.14.2", + "@babel/types": "^7.14.2", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -14471,12 +14018,12 @@ } }, "@babel/types": { - "version": "7.13.17", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.13.17.tgz", - "integrity": "sha512-RawydLgxbOPDlTLJNtoIypwdmAy//uQIzlKt2+iBiJaRlVuI6QLUxVAyWGNfOzp8Yu4L4lLIacoCyTNtpb4wiA==", + "version": "7.14.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.14.2.tgz", + "integrity": "sha512-SdjAG/3DikRHpUOjxZgnkbR11xUlyDMUFJdvnIgZEE16mqmY0BINMmc4//JMJglEmn6i7sq6p+mGrFWyZ98EEw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.12.11", + "@babel/helper-validator-identifier": "^7.14.0", "to-fast-properties": "^2.0.0" } }, @@ -14513,9 +14060,9 @@ "integrity": "sha512-HyYEUDeIj5rRQU2Hk5HTB2uHsbRQpF70nvMhVzi+VJR0X+xNEhjPui4/kBf3VeH/wqD28PT4sVOm8qqLjBrSZg==" }, "@eslint/eslintrc": { - "version": "0.4.0", - "resolved": 
"https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.0.tgz", - "integrity": "sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.1.tgz", + "integrity": "sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ==", "dev": true, "requires": { "ajv": "^6.12.4", @@ -14725,6 +14272,14 @@ "string-length": "^4.0.1", "terminal-link": "^2.0.0", "v8-to-istanbul": "^7.0.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "@jest/source-map": { @@ -14736,6 +14291,14 @@ "callsites": "^3.0.0", "graceful-fs": "^4.2.4", "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "@jest/test-result": { @@ -14784,6 +14347,14 @@ "slash": "^3.0.0", "source-map": "^0.6.1", "write-file-atomic": "^3.0.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "@jest/types": { @@ -14870,7 +14441,8 @@ "@trysound/sax": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.1.1.tgz", - "integrity": "sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow==" + "integrity": "sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow==", + "dev": true }, "@types/babel__core": { "version": "7.1.14", @@ -14913,6 +14485,14 @@ "@babel/types": "^7.3.0" } }, + "@types/codemirror": { + "version": "0.0.109", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.109.tgz", + "integrity": "sha512-cSdiHeeLjvGn649lRTNeYrVCDOgDrtP+bDDSFDd1TF+i0jKGPDRozno2NOJ9lTniso+taiv4kiVS8dgM8Jm5lg==", + "requires": { + "@types/tern": "*" + } + }, "@types/eslint": { "version": "7.2.10", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.2.10.tgz", @@ -14980,6 +14560,11 @@ "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", "dev": true }, + "@types/marked": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@types/marked/-/marked-2.0.2.tgz", + "integrity": "sha512-P4zanhCQKs4tiWPPBGpB7lHflgFCP9DFGNI5YtpW9MALKoy2qs9rHNWJ+z55cegD9uCfnmsKuaosq9FNvbxrOw==" + }, "@types/mdast": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", @@ -14996,9 +14581,9 @@ "dev": true }, "@types/node": { - "version": "14.14.41", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz", - "integrity": "sha512-dueRKfaJL4RTtSa7bWeTK1M+VH+Gns73oCgzvYfHZywRCoPSd8EkXBL0mZ9unPTveBn+D9phZBaxuzpwjWkW0g==" + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-15.0.3.tgz", + "integrity": "sha512-/WbxFeBU+0F79z9RdEOXH4CsDga+ibi5M8uEYr91u3CkT/pdWcV8MCook+4wDPnZBexRdwWS+PiVZ2xJviAzcQ==" }, "@types/normalize-package-data": { "version": "2.4.0", @@ -15009,7 +14594,8 @@ 
"@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", + "dev": true }, "@types/prettier": { "version": "2.2.3", @@ -15023,6 +14609,14 @@ "integrity": "sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==", "dev": true }, + "@types/tern": { + "version": "0.23.3", + "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.3.tgz", + "integrity": "sha512-imDtS4TAoTcXk0g7u4kkWqedB3E4qpjXzCpD2LU5M5NAXHzCDsypyvXSaG7mM8DKYkCRa7tFp4tS/lp/Wo7Q3w==", + "requires": { + "@types/estree": "*" + } + }, "@types/unist": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", @@ -15125,6 +14719,11 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -15272,23 +14871,23 @@ } }, "@webpack-cli/configtest": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.2.tgz", - "integrity": "sha512-3OBzV2fBGZ5TBfdW50cha1lHDVf9vlvRXnjpVbJBa20pSZQaSkMJZiwA8V2vD9ogyeXn8nU5s5A6mHyf5jhMzA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.3.tgz", + "integrity": "sha512-WQs0ep98FXX2XBAfQpRbY0Ma6ADw8JR6xoIkaIiJIzClGOMqVRvPCWqndTxf28DgFopWan0EKtHtg/5W1h0Zkw==", "requires": {} }, "@webpack-cli/info": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.2.3.tgz", - "integrity": "sha512-lLek3/T7u40lTqzCGpC6CAbY6+vXhdhmwFRxZLMnRm6/sIF/7qMpT8MocXCRQfz0JAh63wpbXLMnsQ5162WS7Q==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.2.4.tgz", + "integrity": "sha512-ogE2T4+pLhTTPS/8MM3IjHn0IYplKM4HbVNMCWA9N4NrdPzunwenpCsqKEXyejMfRu6K8mhauIPYf8ZxWG5O6g==", "requires": { "envinfo": "^7.7.3" } }, "@webpack-cli/serve": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.3.1.tgz", - "integrity": "sha512-0qXvpeYO6vaNoRBI52/UsbcaBydJCggoBBnIo/ovQQdn6fug0BgwsjorV1hVS7fMqGVTZGcVxv8334gjmbj5hw==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.4.0.tgz", + "integrity": "sha512-xgT/HqJ+uLWGX+Mzufusl3cgjAcnqYYskaB7o0vRcwOEfuu6hMzSILQpnIzFMGsTaeaX4Nnekl+6fadLbl1/Vg==", "requires": {} }, "@xtuc/ieee754": { @@ -15368,15 +14967,11 @@ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "requires": {} }, - "alphanum-sort": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", - "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=" - }, "ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==" + "integrity": 
"sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true }, "ansi-escapes": { "version": "4.3.2", @@ -15609,6 +15204,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -15762,7 +15363,8 @@ "boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", + "dev": true }, "brace-expansion": { "version": "1.1.11", @@ -15788,13 +15390,13 @@ "dev": true }, "browserslist": { - "version": "4.16.5", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.5.tgz", - "integrity": "sha512-C2HAjrM1AI/djrpAUU/tr4pml1DqLIzJKSLDBXBrNErl9ZCCTXdhwxdJjYc16953+mBWf7Lw+uUJgpgb8cN71A==", + "version": "4.16.6", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.6.tgz", + "integrity": "sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ==", "requires": { - "caniuse-lite": "^1.0.30001214", + "caniuse-lite": "^1.0.30001219", "colorette": "^1.2.2", - "electron-to-chromium": "^1.3.719", + "electron-to-chromium": "^1.3.723", "escalade": "^3.1.1", "node-releases": "^1.1.71" } @@ -15813,6 +15415,12 @@ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, + "builtin-modules": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==", + "dev": true + }, "cache-base": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", @@ -15843,7 +15451,8 @@ "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true }, "camel-case": { "version": "3.0.0", @@ -15878,21 +15487,10 @@ } } }, - "caniuse-api": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", - "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", - "requires": { - "browserslist": "^4.0.0", - "caniuse-lite": "^1.0.0", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - } - }, "caniuse-lite": { - "version": "1.0.30001214", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001214.tgz", - "integrity": "sha512-O2/SCpuaU3eASWVaesQirZv1MSjUNOvmugaD8zNSJqw6Vv5SGwoOpA9LJs3pNPfM745nxqPvfZY3MQKY4AKHYg==" + "version": "1.0.30001228", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz", + "integrity": "sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A==" }, "capture-exit": { "version": "2.0.0", @@ 
-15913,6 +15511,7 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==", + "dev": true, "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -16057,6 +15656,13 @@ "integrity": "sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==", "requires": { "source-map": "~0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, "clean-regexp": { @@ -16068,16 +15674,6 @@ "escape-string-regexp": "^1.0.5" } }, - "clipboard": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/clipboard/-/clipboard-2.0.8.tgz", - "integrity": "sha512-Y6WO0unAIQp5bLmk1zdThRhgJt/x3ks6f30s3oE3H1mgIEU33XyQjEf8gsf6DxC7NPX8Y1SsNWjUjL/ywLnnbQ==", - "requires": { - "good-listener": "^1.2.2", - "select": "^1.1.2", - "tiny-emitter": "^2.0.0" - } - }, "cliui": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", @@ -16156,30 +15752,6 @@ "object-visit": "^1.0.0" } }, - "color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/color/-/color-3.1.3.tgz", - "integrity": "sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==", - "requires": { - "color-convert": "^1.9.1", - "color-string": "^1.5.4" - }, - "dependencies": { - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - } - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -16193,15 +15765,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "color-string": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.5.tgz", - "integrity": "sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg==", - "requires": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, "colorette": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz", @@ -16269,11 +15832,6 @@ "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", "dev": true }, - "core-js": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.0.tgz", - "integrity": "sha512-bd79DPpx+1Ilh9+30aT5O1sgpQd4Ttg8oqkqi51ZzhedMM1omD2e6IOF48Z/DzDCZ2svp49tN/3vneTK6ZBkXw==" - }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", @@ -16284,12 +15842,33 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.0.tgz", "integrity": "sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA==", + "dev": true, "requires": { 
"@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.10.0" + }, + "dependencies": { + "parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + } } }, "cross-spawn": { @@ -16307,19 +15886,6 @@ "resolved": "https://registry.npmjs.org/css-b64-images/-/css-b64-images-0.2.5.tgz", "integrity": "sha1-QgBdgyBLK0pdk7axpWRBM7WSegI=" }, - "css-color-names": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-1.0.1.tgz", - "integrity": "sha512-/loXYOch1qU1biStIFsHH8SxTmOseh1IJqFvy8IujXOm1h+QjUdDhkzOrR5HG8K8mlxREj0yfi8ewCHx0eMxzA==" - }, - "css-declaration-sorter": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.0.0.tgz", - "integrity": "sha512-S0TE4E0ha5+tBHdLWPc5n+S8E4dFBS5xScPvgHkLNZwWvX4ISoFGhGeerLC9uS1cKA/sC+K2wHq6qEbcagT/fg==", - "requires": { - "timsort": "^0.3.0" - } - }, "css-loader": { "version": "5.2.4", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-5.2.4.tgz", @@ -16338,24 +15904,11 @@ "semver": "^7.3.5" } }, - "css-minimizer-webpack-plugin": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-2.0.0.tgz", - "integrity": "sha512-cG/uc94727tx5pBNtb1Sd7gvUPzwmcQi1lkpfqTpdkuNq75hJCw7bIVsCNijLm4dhDcr1atvuysl2rZqOG8Txw==", - "requires": { - "cssnano": "^5.0.0", - "jest-worker": "^26.3.0", - "p-limit": "^3.0.2", - "postcss": "^8.2.9", - "schema-utils": "^3.0.0", - "serialize-javascript": "^5.0.1", - "source-map": "^0.6.1" - } - }, "css-select": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/css-select/-/css-select-3.1.2.tgz", "integrity": "sha512-qmss1EihSuBNWNNhHjxzxSfJoFBM/lERB/Q4EnsJQQC62R2evJDW481091oAdOr9uh46/0n4nrg0It5cAnj1RA==", + "dev": true, "requires": { "boolbase": "^1.0.0", "css-what": "^4.0.0", @@ -16368,77 +15921,36 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", + "dev": true, "requires": { "mdn-data": "2.0.14", "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "css-what": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-4.0.0.tgz", - "integrity": "sha512-teijzG7kwYfNVsUh2H/YN62xW3KK9YhXEgSlbxMlcyjPNvdKJqFx5lrwlJgoFP1ZHlB89iGDlo/JyshKeRhv5A==" + "integrity": "sha512-teijzG7kwYfNVsUh2H/YN62xW3KK9YhXEgSlbxMlcyjPNvdKJqFx5lrwlJgoFP1ZHlB89iGDlo/JyshKeRhv5A==", + "dev": true }, "cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", "integrity": 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" }, - "cssnano": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.0.1.tgz", - "integrity": "sha512-5WubEmKcK2cqw43DUAayRBiIlTdX7iX3ZowrWDVxSVcW3hyohVnbJ4K4mbnWtJp5rfJnUwHg5H4mDAGzmuCM3g==", - "requires": { - "cosmiconfig": "^7.0.0", - "cssnano-preset-default": "^5.0.0", - "is-resolvable": "^1.1.0" - } - }, - "cssnano-preset-default": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.0.0.tgz", - "integrity": "sha512-zsLppqF7PxY6Tk+ghVx8djf4o1jIOu2GNufqy9lMxldt7gGpSy3FQ6jn7FCd5DZWCaBa7A/1/HVh8CK3BdFSJg==", - "requires": { - "css-declaration-sorter": "6.0.0", - "cssnano-utils": "^2.0.0", - "postcss-calc": "^8.0.0", - "postcss-colormin": "^5.0.0", - "postcss-convert-values": "^5.0.0", - "postcss-discard-comments": "^5.0.0", - "postcss-discard-duplicates": "^5.0.0", - "postcss-discard-empty": "^5.0.0", - "postcss-discard-overridden": "^5.0.0", - "postcss-merge-longhand": "^5.0.0", - "postcss-merge-rules": "^5.0.0", - "postcss-minify-font-values": "^5.0.0", - "postcss-minify-gradients": "^5.0.0", - "postcss-minify-params": "^5.0.0", - "postcss-minify-selectors": "^5.0.0", - "postcss-normalize-charset": "^5.0.0", - "postcss-normalize-display-values": "^5.0.0", - "postcss-normalize-positions": "^5.0.0", - "postcss-normalize-repeat-style": "^5.0.0", - "postcss-normalize-string": "^5.0.0", - "postcss-normalize-timing-functions": "^5.0.0", - "postcss-normalize-unicode": "^5.0.0", - "postcss-normalize-url": "^5.0.0", - "postcss-normalize-whitespace": "^5.0.0", - "postcss-ordered-values": "^5.0.0", - "postcss-reduce-initial": "^5.0.0", - "postcss-reduce-transforms": "^5.0.0", - "postcss-svgo": "^5.0.0", - "postcss-unique-selectors": "^5.0.0" - } - }, - "cssnano-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-2.0.0.tgz", - "integrity": "sha512-xvxmTszdrvSyTACdPe8VU5J6p4sm3egpgw54dILvNqt5eBUv6TFjACLhSxtRuEsxYrgy8uDy269YjScO5aKbGA==", - "requires": {} - }, "csso": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", + "dev": true, "requires": { "css-tree": "^1.1.2" } @@ -16856,11 +16368,6 @@ "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", "dev": true }, - "delegate": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/delegate/-/delegate-3.2.0.tgz", - "integrity": "sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw==" - }, "detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -16880,6 +16387,14 @@ "dev": true, "requires": { "path-type": "^4.0.0" + }, + "dependencies": { + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + } } }, "doctrine": { @@ -16895,6 +16410,7 @@ "version": "1.3.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.3.1.tgz", "integrity": "sha512-Pv2ZluG5ife96udGgEDovOOOA5UELkltfJpnIExPrAk1LTvecolUGn6lIaoLh86d83GiB86CjzciMd9BuRB71Q==", + "dev": true, "requires": { "domelementtype": "^2.0.1", "domhandler": "^4.0.0", @@ -16904,7 +16420,8 @@ 
"domelementtype": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.2.0.tgz", - "integrity": "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==" + "integrity": "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==", + "dev": true }, "domexception": { "version": "2.0.1", @@ -16927,6 +16444,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.2.0.tgz", "integrity": "sha512-zk7sgt970kzPks2Bf+dwT/PLzghLnsivb9CcxkvR8Mzr66Olr0Ofd8neSbglHJHaHa2MadfoSdNlKYAaafmWfA==", + "dev": true, "requires": { "domelementtype": "^2.2.0" } @@ -16935,33 +16453,28 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.6.0.tgz", "integrity": "sha512-y0BezHuy4MDYxh6OvolXYsH+1EMGmFbwv5FKW7ovwMG6zTPWqNPq3WF9ayZssFq+UlKdffGLbOEaghNdaOm1WA==", + "dev": true, "requires": { "dom-serializer": "^1.0.1", "domelementtype": "^2.2.0", "domhandler": "^4.2.0" } }, - "dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", - "requires": { - "is-obj": "^2.0.0" - } - }, "dropzone": { "version": "5.9.2", "resolved": "https://registry.npmjs.org/dropzone/-/dropzone-5.9.2.tgz", "integrity": "sha512-5t2z51DzIsWDbTpwcJIvUlwxBbvcwdCApz0yb9ecKJwG155Xm92KMEZmHW1B0MzoXOKvFwdd0nPu5cpeVcvPHQ==" }, "easymde": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/easymde/-/easymde-2.14.0.tgz", - "integrity": "sha512-yQh3EF1amknaxDhXE1L28kwknREU8S19o01ki0t6Q8ThECCipXTOM3E/LL32Ia5D3AsCBRbC1/fT5tpLniVGuw==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/easymde/-/easymde-2.15.0.tgz", + "integrity": "sha512-9jMRIVvKt1d0UjRN45yotUYECAM4xvw0TTAQw8sYDONP++keWJVnd8Xrn+V+vQEN/v9/X0SWEoo1rFSgCooGpw==", "requires": { - "codemirror": "^5.59.2", + "@types/codemirror": "0.0.109", + "@types/marked": "^2.0.2", + "codemirror": "^5.61.0", "codemirror-spell-checker": "1.1.2", - "marked": "^2.0.0" + "marked": "^2.0.3" } }, "ecc-jsbn": { @@ -16986,9 +16499,9 @@ } }, "electron-to-chromium": { - "version": "1.3.719", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.719.tgz", - "integrity": "sha512-heM78GKSqrIzO9Oz0/y22nTBN7bqSP1Pla2SyU9DiSnQD+Ea9SyyN5RWWlgqsqeBLNDkSlE9J9EHFmdMPzxB/g==" + "version": "1.3.727", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.727.tgz", + "integrity": "sha512-Mfz4FIB4FSvEwBpDfdipRIrwd6uo8gUDoRDF4QEYb4h4tSuI3ov594OrjU6on042UlFHouIJpClDODGkPcBSbg==" }, "emittery": { "version": "0.7.2", @@ -17016,9 +16529,9 @@ } }, "enhanced-resolve": { - "version": "5.8.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.0.tgz", - "integrity": "sha512-Sl3KRpJA8OpprrtaIswVki3cWPiPKxXuFxJXBp+zNb6s6VwNWwFRUdtmzd2ReUut8n+sCPx7QCtQ7w5wfJhSgQ==", + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", + "integrity": "sha512-F27oB3WuHDzvR2DOGNTaYy0D5o0cnrv8TeI482VM4kYgQd/FT9lUQwuNsJ0oOHtBUq7eiW5ytqzp7nBFknL+GA==", "requires": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -17028,6 +16541,7 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + 
"dev": true, "requires": { "ansi-colors": "^4.1.1" } @@ -17035,7 +16549,8 @@ "entities": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "dev": true }, "entity-decode": { "version": "2.0.2", @@ -17063,6 +16578,7 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, "requires": { "is-arrayish": "^0.2.1" } @@ -17108,16 +16624,16 @@ } }, "esbuild": { - "version": "0.10.2", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.10.2.tgz", - "integrity": "sha512-/5vsZD7wTJJHC3yNXLUjXNvUDwqwNoIMvFvLd9tcDQ9el5l13pspYm3yufavjIeYvNtAbo+6N/6uoWx9dGA6ug==" + "version": "0.11.20", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.11.20.tgz", + "integrity": "sha512-QOZrVpN/Yz74xfat0H6euSgn3RnwLevY1mJTEXneukz1ln9qB+ieaerRMzSeETpz/UJWsBMzRVR/andBht5WKw==" }, "esbuild-loader": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-2.12.0.tgz", - "integrity": "sha512-wQJ8tryOYC5CsG62scwX92HIlY0kgH+28xPz+3pxGZlexkQJYZF0kN97iVR6pyzGzfGhTPD7pabdqVnYF7HMVw==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-2.13.0.tgz", + "integrity": "sha512-gC9lML8RGkTSWG2pJVEOZRLMoIluq1Jd7OzzVkOZKMzbMDMWDhXEwXLs60n+aglnAYa9GVrD/UXjTHkM51nBsg==", "requires": { - "esbuild": "^0.10.2", + "esbuild": "^0.11.19", "joycon": "^3.0.1", "json5": "^2.2.0", "loader-utils": "^2.0.0", @@ -17189,6 +16705,13 @@ "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", "dev": true }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + }, "type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", @@ -17201,13 +16724,13 @@ } }, "eslint": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.24.0.tgz", - "integrity": "sha512-k9gaHeHiFmGCDQ2rEfvULlSLruz6tgfA8DEn+rY9/oYPFFTlz55mM/Q/Rij1b2Y42jwZiK3lXvNTw6w6TXzcKQ==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.26.0.tgz", + "integrity": "sha512-4R1ieRf52/izcZE7AlLy56uIHHDLT74Yzz2Iv2l6kDaYvEu9x+wMB5dZArVL8SYGXSYV2YAg70FcW5Y5nGGNIg==", "dev": true, "requires": { "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.0", + "@eslint/eslintrc": "^0.4.1", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -17357,17 +16880,17 @@ } }, "eslint-plugin-unicorn": { - "version": "30.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-30.0.0.tgz", - "integrity": "sha512-ZKbE48Ep99z/3geLpkBfv+jNrzr2k7bLqCC/RfZOekZzAvn2/ECDE/d8zGdW1YxHmIC9pevQvm8Pl89v9GEIVw==", + "version": "32.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-32.0.1.tgz", + "integrity": "sha512-LaZ9utnXtOJjnoDkpm+nQsONUUmyRR0WD6PGROSdQRRW3LRmgK/ZP8wxjW+Ai+2uolKTtuJzLx2mvbIeIoLqpg==", "dev": true, "requires": { "ci-info": "^3.1.1", "clean-regexp": "^1.0.0", 
"eslint-template-visitor": "^2.3.2", "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", "import-modules": "^2.1.0", + "is-builtin-module": "^3.1.0", "lodash": "^4.17.21", "pluralize": "^8.0.0", "read-pkg-up": "^7.0.1", @@ -17377,6 +16900,18 @@ "semver": "^7.3.5" }, "dependencies": { + "parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, "read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -17468,9 +17003,9 @@ } }, "eslint-visitor-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz", - "integrity": "sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true }, "espree": { @@ -18020,6 +17555,12 @@ "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", "dev": true }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "stack-utils": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.5.tgz", @@ -18344,9 +17885,9 @@ } }, "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -18454,14 +17995,6 @@ "minimist": "^1.2.5" } }, - "good-listener": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/good-listener/-/good-listener-1.2.2.tgz", - "integrity": "sha1-1TswzfkxPf+33JoNR3CWqm0UXFA=", - "requires": { - "delegate": "^3.1.2" - } - }, "graceful-fs": { "version": "4.2.6", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", @@ -18602,27 +18135,12 @@ "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" }, - "hex-color-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/hex-color-regex/-/hex-color-regex-1.1.0.tgz", - "integrity": "sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==" - }, "hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, - "hsl-regex": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/hsl-regex/-/hsl-regex-1.0.0.tgz", - "integrity": "sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=" - }, - "hsla-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hsla-regex/-/hsla-regex-1.0.0.tgz", - "integrity": "sha1-wc56MWjIxmFAM6S194d/OyJfnDg=" - }, "html-encoding-sniffer": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", @@ -18727,6 +18245,7 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, "requires": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -18775,11 +18294,6 @@ "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", "dev": true }, - "indexes-of": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", - "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" - }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -18805,11 +18319,6 @@ "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==" }, - "is-absolute-url": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", - "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==" - }, "is-accessor-descriptor": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", @@ -18838,21 +18347,22 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true }, "is-bigint": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.1.tgz", - "integrity": "sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", + "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==", "dev": true }, "is-boolean-object": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.0.tgz", - "integrity": "sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz", + "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==", "dev": true, "requires": { - "call-bind": "^1.0.0" + "call-bind": "^1.0.2" } }, "is-buffer": { @@ -18861,6 +18371,15 @@ "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", "dev": true }, + "is-builtin-module": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.1.0.tgz", + "integrity": "sha512-OV7JjAgOTfAFJmHZLvpSTb4qi0nIILDV1gWPYDnDJUTNFM5aGlRAhk4QcT8i7TuAleeEV5Fdkqn3t4mS+Q11fg==", + "dev": true, + "requires": { + "builtin-modules": "^3.0.0" + 
} + }, "is-callable": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", @@ -18884,30 +18403,10 @@ } } }, - "is-color-stop": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-color-stop/-/is-color-stop-1.1.0.tgz", - "integrity": "sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=", - "requires": { - "css-color-names": "^0.0.4", - "hex-color-regex": "^1.1.0", - "hsl-regex": "^1.0.0", - "hsla-regex": "^1.0.0", - "rgb-regex": "^1.0.1", - "rgba-regex": "^1.0.0" - }, - "dependencies": { - "css-color-names": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz", - "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=" - } - } - }, "is-core-module": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", - "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.4.0.tgz", + "integrity": "sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==", "requires": { "has": "^1.0.3" } @@ -18922,9 +18421,9 @@ } }, "is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.4.tgz", + "integrity": "sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A==", "dev": true }, "is-decimal": { @@ -19002,16 +18501,11 @@ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" }, "is-number-object": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz", - "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz", + "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==", "dev": true }, - "is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" - }, "is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", @@ -19033,13 +18527,13 @@ "dev": true }, "is-regex": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz", - "integrity": "sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", + "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" } }, "is-regexp": { @@ -19048,29 +18542,24 @@ "integrity": "sha512-OZ4IlER3zmRIoB9AqNhEggVxqIH4ofDns5nRrPS6yQxXE1TPCUpFznBfRQmQa8uC+pXqjMnukiJBxCisIxiLGA==", "dev": true }, - "is-resolvable": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", - "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" - }, "is-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==" }, "is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", + "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true }, "is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, "requires": { - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.2" } }, "is-typedarray": { @@ -19174,6 +18663,14 @@ "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "istanbul-reports": { @@ -19570,6 +19067,18 @@ "slash": "^3.0.0" }, "dependencies": { + "parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, "read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -19819,7 +19328,8 @@ "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true }, "js-yaml": { "version": "3.14.1", @@ -19872,9 +19382,9 @@ }, "dependencies": { "acorn": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.1.tgz", - "integrity": "sha512-xYiIVjNuqtKXMxlRMDc6mZUhXehod4a3gbZ1qRlM7icK4EbxUFNLhWoPblCvFtB2Y9CIqHP3CF/rdxLItaQv8g==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.2.4.tgz", + "integrity": "sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg==", "dev": true } } @@ -19893,7 +19403,8 @@ "json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": 
"sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true }, "json-schema": { "version": "0.2.3", @@ -20003,6 +19514,12 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "optional": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true } } }, @@ -20051,6 +19568,11 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, "webpack-sources": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", @@ -20075,7 +19597,8 @@ "lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", - "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true }, "load-json-file": { "version": "2.0.0", @@ -20089,15 +19612,6 @@ "strip-bom": "^3.0.0" }, "dependencies": { - "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "requires": { - "error-ex": "^1.2.0" - } - }, "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", @@ -20145,17 +19659,6 @@ "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", "dev": true }, - "lodash.flatten": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=", - "dev": true - }, - "lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=" - }, "lodash.template": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", @@ -20179,11 +19682,6 @@ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, - "lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" - }, "log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", @@ -20307,7 +19805,8 @@ "mdn-data": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", - "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==", + "dev": true }, "meow": { "version": "9.0.0", @@ -20350,6 +19849,18 @@ "validate-npm-package-license": "^3.0.1" } }, + "parse-json": { + "version": "5.2.0", + 
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, "read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", @@ -20427,6 +19938,13 @@ "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", "requires": { "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, "merge-stream": { @@ -20440,9 +19958,9 @@ "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" }, "mermaid": { - "version": "8.9.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-8.9.2.tgz", - "integrity": "sha512-XWEaraDRDlHZexdeHSSr/MH4VJAOksRSPudchi69ecZJ7IUjjlzHsg32n4ZwJUh6lFO+NMYLHwHNNYUyxIjGPg==", + "version": "8.10.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-8.10.1.tgz", + "integrity": "sha512-KxwKEJDKy303TQdz5TQMFb/4u+gUL21CefUMGOfuigDh9powcYaNmuJ5BkHmO0jB3Y1z2zlsuKvHZ2CusWH5+A==", "requires": { "@braintree/sanitize-url": "^3.1.0", "d3": "^5.7.0", @@ -20507,15 +20025,20 @@ "dev": true }, "mini-css-extract-plugin": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-1.5.0.tgz", - "integrity": "sha512-SIbuLMv6jsk1FnLIU5OUG/+VMGUprEjM1+o2trOAx8i5KOKMrhyezb1dJ4Ugsykb8Jgq8/w5NEopy6escV9G7g==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.0.tgz", + "integrity": "sha512-nPFKI7NSy6uONUo9yn2hIfb9vyYvkFu95qki0e21DQ9uaqNKDP15DGpK0KnV6wDroWxPHtExrdEwx/yDQ8nVRw==", "requires": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0", "webpack-sources": "^1.1.0" }, "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, "webpack-sources": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", @@ -20606,14 +20129,14 @@ "integrity": "sha512-9ARkWHBs+6YJIvrIp0Ik5tyTTtP9PoV0Ssu2Ocq5y9v8+NOOpWiRshAp8c4rZVWTOe+157on/5G+zj5pwIQFEQ==" }, "monaco-editor": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.23.0.tgz", - "integrity": "sha512-q+CP5zMR/aFiMTE9QlIavGyGicKnG2v/H8qVvybLzeFsARM8f6G9fL0sMST2tyVYCwDKkGamZUI6647A0jR/Lg==" + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.24.0.tgz", + "integrity": "sha512-o1f0Lz6ABFNTtnEqqqvlY9qzNx24rQZx1RgYNQ8SkWkE+Ka63keHH/RqxQ4QhN4fs/UYOnvAtEUZsPrzccH++A==" }, "monaco-editor-webpack-plugin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/monaco-editor-webpack-plugin/-/monaco-editor-webpack-plugin-3.0.1.tgz", - "integrity": "sha512-Hym4HqWgIpyoi9G0spln/b/7rkDKfYwIOrNzo1fHHMc+MLYSwD1JXHwKSDS77X27ZHfVJsEXbMZYdGhSYuVF0w==", + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/monaco-editor-webpack-plugin/-/monaco-editor-webpack-plugin-3.1.0.tgz", + "integrity": "sha512-TP5NkCAV0OeFTry5k/d60KR7CkhTXL4kgJKtE3BzjgbDb5TGEPEhoKmHBrSa6r7Oc0sNbPLZhKD/TP2ig7A+/A==", "requires": { "loader-utils": "^2.0.0" } @@ -20630,9 +20153,9 @@ "dev": true }, "nanoid": { - "version": "3.1.22", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.22.tgz", - "integrity": "sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ==" + "version": "3.1.23", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.23.tgz", + "integrity": "sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==" }, "nanomatch": { "version": "1.2.13", @@ -20776,11 +20299,6 @@ "integrity": "sha1-0LFF62kRicY6eNIB3E/bEpPvDAM=", "dev": true }, - "normalize-url": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.0.tgz", - "integrity": "sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==" - }, "npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -20793,6 +20311,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", + "dev": true, "requires": { "boolbase": "^1.0.0" } @@ -20895,9 +20414,9 @@ } }, "object-inspect": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.2.tgz", - "integrity": "sha512-gz58rdPpadwztRrPjZE9DZLOABUpTGdcANUgOwBFO1C+HZZhePoP83M65WGDmbpwFYJSWqavbl4SgDn4k8RYTA==", + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", + "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", "dev": true }, "object-keys": { @@ -20991,11 +20510,11 @@ "dev": true }, "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "requires": { - "yocto-queue": "^0.1.0" + "p-try": "^2.0.0" } }, "p-locate": { @@ -21004,16 +20523,6 @@ "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "requires": { "p-limit": "^2.2.0" - }, - "dependencies": { - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "requires": { - "p-try": "^2.0.0" - } - } } }, "p-try": { @@ -21033,6 +20542,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, "requires": { "callsites": "^3.0.0" } @@ -21052,14 +20562,12 @@ } }, "parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": 
"sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" + "error-ex": "^1.2.0" } }, "parse-ms": { @@ -21105,9 +20613,13 @@ "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + } }, "performance-now": { "version": "2.1.0", @@ -21213,66 +20725,22 @@ "dev": true }, "postcss": { - "version": "8.2.12", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.12.tgz", - "integrity": "sha512-BJnGT5+0q2tzvs6oQfnY2NpEJ7rIXNfBnZtQOKCIsweeWXBXeDd5k31UgTdS3d/c02ouspufn37mTaHWkJyzMQ==", + "version": "8.2.15", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.15.tgz", + "integrity": "sha512-2zO3b26eJD/8rb106Qu2o7Qgg52ND5HPjcyQiK2B98O388h43A448LCslC0dI2P97wCAQRJsFvwTRcXxTKds+Q==", "requires": { "colorette": "^1.2.2", - "nanoid": "^3.1.22", + "nanoid": "^3.1.23", "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, - "postcss-calc": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.0.0.tgz", - "integrity": "sha512-5NglwDrcbiy8XXfPM11F3HeC6hoT9W7GUH/Zi5U/p7u3Irv4rHhdDcIZwG0llHXV4ftsBjpfWMXAnXNl4lnt8g==", - "requires": { - "postcss-selector-parser": "^6.0.2", - "postcss-value-parser": "^4.0.2" - } - }, - "postcss-colormin": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.0.0.tgz", - "integrity": "sha512-Yt84+5V6CgS/AhK7d7MA58vG8dSZ7+ytlRtWLaQhag3HXOncTfmYpuUOX4cDoXjvLfw1sHRCHMiBjYhc35CymQ==", - "requires": { - "browserslist": "^4.16.0", - "color": "^3.1.1", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-convert-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.0.0.tgz", - "integrity": "sha512-V5kmYm4xoBAjNs+eHY/6XzXJkkGeg4kwNf2ocfqhLb1WBPEa4oaSmoi1fnVO7Dkblqvus9h+AenDvhCKUCK7uQ==", - "requires": { - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-discard-comments": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.0.0.tgz", - "integrity": "sha512-Umig6Gxs8m20RihiXY6QkePd6mp4FxkA1Dg+f/Kd6uw0gEMfKRjDeQOyFkLibexbJJGHpE3lrN/Q0R9SMrUMbQ==", - "requires": {} - }, - "postcss-discard-duplicates": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.0.0.tgz", - "integrity": "sha512-vEJJ+Y3pFUnO1FyCBA6PSisGjHtnphL3V6GsNvkASq/VkP3OX5/No5RYXXLxHa2QegStNzg6HYrYdo71uR4caQ==", - "requires": {} - }, - 
"postcss-discard-empty": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.0.0.tgz", - "integrity": "sha512-+wigy099Y1xZxG36WG5L1f2zeH1oicntkJEW4TDIqKKDO2g9XVB3OhoiHTu08rDEjLnbcab4rw0BAccwi2VjiQ==", - "requires": {} - }, - "postcss-discard-overridden": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.0.0.tgz", - "integrity": "sha512-hybnScTaZM2iEA6kzVQ6Spozy7kVdLw+lGw8hftLlBEzt93uzXoltkYp9u0tI8xbfhxDLTOOzHsHQCkYdmzRUg==", - "requires": {} - }, "postcss-html": { "version": "0.36.0", "resolved": "https://registry.npmjs.org/postcss-html/-/postcss-html-0.36.0.tgz", @@ -21425,6 +20893,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -21442,79 +20916,6 @@ "integrity": "sha1-J7Ocb02U+Bsac7j3Y1HGCeXO8kQ=", "dev": true }, - "postcss-merge-longhand": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.0.0.tgz", - "integrity": "sha512-VZNFA40K8BYHzJNA6jHPdg1Nofsz/nK5Dkszrcb5IgWcLroSBZOD6I/iNQzpejSU/3XwpOiZNaYAdBV4KcvxWA==", - "requires": { - "css-color-names": "^1.0.1", - "postcss-value-parser": "^4.1.0", - "stylehacks": "^5.0.0" - } - }, - "postcss-merge-rules": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.0.0.tgz", - "integrity": "sha512-TfsXbKjNYCGfUPEXGIGPySnMiJbdS+3gcVeV8gwmJP4RajyKZHW8E0FYDL1WmggTj3hi+m+WUCAvqRpX2ut4Kg==", - "requires": { - "browserslist": "^4.16.0", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^2.0.0", - "postcss-selector-parser": "^6.0.4", - "vendors": "^1.0.3" - } - }, - "postcss-minify-font-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.0.0.tgz", - "integrity": "sha512-zi2JhFaMOcIaNxhndX5uhsqSY1rexKDp23wV8EOmC9XERqzLbHsoRye3aYF716Zm+hkcR4loqKDt8LZlmihwAg==", - "requires": { - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-minify-gradients": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.0.0.tgz", - "integrity": "sha512-/jPtNgs6JySMwgsE5dPOq8a2xEopWTW3RyqoB9fLqxgR+mDUNLSi7joKd+N1z7FXWgVkc4l/dEBMXHgNAaUbvg==", - "requires": { - "cssnano-utils": "^2.0.0", - "is-color-stop": "^1.1.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-minify-params": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.0.0.tgz", - "integrity": "sha512-KvZYIxTPBVKjdd+XgObq9A+Sfv8lMkXTpbZTsjhr42XbfWIeLaTItMlygsDWfjArEc3muUfDaUFgNSeDiJ5jug==", - "requires": { - "alphanum-sort": "^1.0.2", - "browserslist": "^4.16.0", - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0", - "uniqs": "^2.0.0" - } - }, - "postcss-minify-selectors": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.0.0.tgz", - "integrity": "sha512-cEM0O0eWwFIvmo6nfB0lH0vO/XFwgqIvymODbfPXZ1gTA3i76FKnb7TGUrEpiTxaXH6tgYQ6DcTHwRiRS+YQLQ==", - "requires": { - "alphanum-sort": "^1.0.2", - 
"postcss-selector-parser": "^3.1.2" - }, - "dependencies": { - "postcss-selector-parser": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", - "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", - "requires": { - "dot-prop": "^5.2.0", - "indexes-of": "^1.0.1", - "uniq": "^1.0.1" - } - } - } - }, "postcss-modules-extract-imports": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", @@ -21547,109 +20948,6 @@ "icss-utils": "^5.0.0" } }, - "postcss-normalize-charset": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.0.0.tgz", - "integrity": "sha512-pqsCkgo9KmQP0ew6DqSA+uP9YN6EfsW20pQ3JU5JoQge09Z6Too4qU0TNDsTNWuEaP8SWsMp+19l15210MsDZQ==", - "requires": {} - }, - "postcss-normalize-display-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.0.0.tgz", - "integrity": "sha512-t4f2d//gH1f7Ns0Jq3eNdnWuPT7TeLuISZ6RQx4j8gpl5XrhkdshdNcOnlrEK48YU6Tcb6jqK7dorME3N4oOGA==", - "requires": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-positions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.0.0.tgz", - "integrity": "sha512-0o6/qU5ky74X/eWYj/tv4iiKCm3YqJnrhmVADpIMNXxzFZywsSQxl8F7cKs8jQEtF3VrJBgcDHTexZy1zgDoYg==", - "requires": { - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-repeat-style": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.0.0.tgz", - "integrity": "sha512-KRT14JbrXKcFMYuc4q7lh8lvv8u22wLyMrq+UpHKLtbx2H/LOjvWXYdoDxmNrrrJzomAWL+ViEXr48/IhSUJnQ==", - "requires": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-string": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.0.0.tgz", - "integrity": "sha512-wSO4pf7GNcDZpmelREWYADF1+XZWrAcbFLQCOqoE92ZwYgaP/RLumkUTaamEzdT2YKRZAH8eLLKGWotU/7FNPw==", - "requires": { - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-timing-functions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.0.0.tgz", - "integrity": "sha512-TwPaDX+wl9wO3MUm23lzGmOzGCGKnpk+rSDgzB2INpakD5dgWR3L6bJq1P1LQYzBAvz8fRIj2NWdnZdV4EV98Q==", - "requires": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-unicode": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.0.0.tgz", - "integrity": "sha512-2CpVoz/67rXU5s9tsPZDxG1YGS9OFHwoY9gsLAzrURrCxTAb0H7Vp87/62LvVPgRWTa5ZmvgmqTp2rL8tlm72A==", - "requires": { - "browserslist": "^4.16.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-normalize-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.0.0.tgz", - "integrity": "sha512-ICDaGFBqLgA3dlrCIRuhblLl80D13YtgEV9NJPTYJtgR72vu61KgxAHv+z/lKMs1EbwfSQa3ALjOFLSmXiE34A==", - "requires": { - "is-absolute-url": "^3.0.3", - "normalize-url": "^4.5.0", - "postcss-value-parser": "^4.1.0" - } - }, - 
"postcss-normalize-whitespace": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.0.0.tgz", - "integrity": "sha512-KRnxQvQAVkJfaeXSz7JlnD9nBN9sFZF9lrk9452Q2uRoqrRSkinqifF8Iex7wZGei2DZVG/qpmDFDmRvbNAOGA==", - "requires": { - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-ordered-values": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.0.0.tgz", - "integrity": "sha512-dPr+SRObiHueCIc4IUaG0aOGQmYkuNu50wQvdXTGKy+rzi2mjmPsbeDsheLk5WPb9Zyf2tp8E+I+h40cnivm6g==", - "requires": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - } - }, - "postcss-reduce-initial": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.0.0.tgz", - "integrity": "sha512-wR6pXUaFbSMG1oCKx8pKVA+rnSXCHlca5jMrlmkmif+uig0HNUTV9oGN5kjKsM3mATQAldv2PF9Tbl2vqLFjnA==", - "requires": { - "browserslist": "^4.16.0", - "caniuse-api": "^3.0.0" - } - }, - "postcss-reduce-transforms": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.0.0.tgz", - "integrity": "sha512-iHdGODW4YzM3WjVecBhPQt6fpJC4lGQZxJKjkBNHpp2b8dzmvj0ogKThqya+IRodQEFzjfXgYeESkf172FH5Lw==", - "requires": { - "cssnano-utils": "^2.0.0", - "postcss-value-parser": "^4.1.0" - } - }, "postcss-resolve-nested-selector": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz", @@ -21728,6 +21026,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -21812,6 +21116,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -21895,6 +21205,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -21907,23 +21223,14 @@ } }, "postcss-selector-parser": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.5.tgz", - "integrity": "sha512-aFYPoYmXbZ1V6HZaSvat08M97A8HqO6Pjz+PiNpw/DhuRrC72XWAdp3hL6wusDCN31sSmcZyMGa2hZEuX+Xfhg==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz", + "integrity": "sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg==", "requires": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, - "postcss-svgo": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.0.0.tgz", - "integrity": "sha512-M3/VS4sFI1Yp9g0bPL+xzzCNz5iLdRUztoFaugMit5a8sMfkVzzhwqbsOlD8IFFymCdJDmXmh31waYHWw1K4BA==", - "requires": { - "postcss-value-parser": "^4.1.0", - "svgo": "^2.3.0" - } - }, "postcss-syntax": { "version": "0.36.2", "resolved": "https://registry.npmjs.org/postcss-syntax/-/postcss-syntax-0.36.2.tgz", @@ -21931,16 +21238,6 @@ "dev": true, "requires": {} }, - "postcss-unique-selectors": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.0.0.tgz", - "integrity": "sha512-o9l4pF8SRn7aCMTmzb/kNv/kjV7wPZpZ8Nlb1Gq8v/Qvw969K1wanz1RVA0ehHzWe9+wHXaC2DvZlak/gdMJ5w==", - "requires": { - "alphanum-sort": "^1.0.2", - "postcss-selector-parser": "^6.0.2", - "uniqs": "^2.0.0" - } - }, "postcss-value-parser": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", @@ -22066,17 +21363,6 @@ "load-json-file": "^2.0.0", "normalize-package-data": "^2.3.2", "path-type": "^2.0.0" - }, - "dependencies": { - "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true, - "requires": { - "pify": "^2.0.0" - } - } } }, "read-pkg-up": { @@ -22388,7 +21674,8 @@ "resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true }, "resolve-url": { "version": "0.2.1", @@ -22407,16 +21694,6 @@ "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" }, - "rgb-regex": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/rgb-regex/-/rgb-regex-1.0.1.tgz", - "integrity": "sha1-wODWiC3w4jviVKR16O3UGRX+rrE=" - }, - "rgba-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", - "integrity": "sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=" - }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -22730,11 +22007,6 @@ "ajv-keywords": "^3.5.2" } }, - "select": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/select/-/select-1.1.2.tgz", - "integrity": "sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0=" - }, "semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -22819,21 +22091,6 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" }, - "simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "requires": { - "is-arrayish": "^0.3.1" - }, - "dependencies": { - "is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - } - } - }, "sisteransi": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", @@ -22974,12 +22231,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true - }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true } } }, @@ -23042,9 +22293,10 @@ "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==" }, "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true }, "source-map-resolve": { "version": "0.5.3", @@ -23066,6 +22318,13 @@ "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, "source-map-url": { @@ -23127,9 +22386,9 @@ "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==" }, "spdx-satisfies": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-5.0.0.tgz", - "integrity": "sha512-/hGhwh20BeGmkA+P/lm06RvXD94JduwNxtx/oX3B5ClPt1/u/m5MCaDNo1tV3Y9laLkQr/NRde63b9lLMhlNfw==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-5.0.1.tgz", + "integrity": "sha512-Nwor6W6gzFp8XX4neaKQ7ChV4wmpSh2sSDemMFSzHxpTw460jxFYeOn+jq4ybnSSw/5sc3pjka9MQPouksQNpw==", "requires": { "spdx-compare": "^1.0.0", "spdx-expression-parse": "^3.0.0", @@ -23177,7 +22436,8 @@ "stable": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", - "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==", + "dev": true }, "stack-utils": { "version": "2.0.3", @@ -23389,26 +22649,17 @@ "integrity": "sha1-eVjHk+R+MuB9K1yv5cC/jhLneQI=", "dev": true }, - "stylehacks": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.0.0.tgz", - "integrity": "sha512-QOWm6XivDLb+fqffTZP8jrmPmPITVChl2KCY2R05nsCWwLi3VGhCdVc3IVGNwd1zzTt1jPd67zIKjpQfxzQZeA==", - "requires": { - "browserslist": "^4.16.0", - "postcss-selector-parser": "^6.0.4" - } - }, "stylelint": { - "version": "13.12.0", - "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-13.12.0.tgz", - "integrity": "sha512-P8O1xDy41B7O7iXaSlW+UuFbE5+ZWQDb61ndGDxKIt36fMH50DtlQTbwLpFLf8DikceTAb3r6nPrRv30wBlzXw==", + "version": "13.13.1", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-13.13.1.tgz", + "integrity": "sha512-Mv+BQr5XTUrKqAXmpqm6Ddli6Ief+AiPZkRsIrAoUKFuq/ElkUh9ZMYxXD0iQNZ5ADghZKLOWz1h7hTClB7zgQ==", "dev": true, "requires": { "@stylelint/postcss-css-in-js": "^0.37.2", "@stylelint/postcss-markdown": "^0.36.2", "autoprefixer": "^9.8.6", - "balanced-match": "^1.0.0", - "chalk": "^4.1.0", + "balanced-match": "^2.0.0", + "chalk": "^4.1.1", 
"cosmiconfig": "^7.0.0", "debug": "^4.3.1", "execall": "^2.0.0", @@ -23417,7 +22668,7 @@ "file-entry-cache": "^6.0.1", "get-stdin": "^8.0.0", "global-modules": "^2.0.0", - "globby": "^11.0.2", + "globby": "^11.0.3", "globjoin": "^0.1.4", "html-tags": "^3.1.0", "ignore": "^5.1.8", @@ -23425,10 +22676,10 @@ "imurmurhash": "^0.1.4", "known-css-properties": "^0.21.0", "lodash": "^4.17.21", - "log-symbols": "^4.0.0", + "log-symbols": "^4.1.0", "mathml-tag-names": "^2.1.3", "meow": "^9.0.0", - "micromatch": "^4.0.2", + "micromatch": "^4.0.4", "normalize-selector": "^0.2.0", "postcss": "^7.0.35", "postcss-html": "^0.36.0", @@ -23438,7 +22689,7 @@ "postcss-safe-parser": "^4.0.2", "postcss-sass": "^0.4.4", "postcss-scss": "^2.1.1", - "postcss-selector-parser": "^6.0.4", + "postcss-selector-parser": "^6.0.5", "postcss-syntax": "^0.36.2", "postcss-value-parser": "^4.1.0", "resolve-from": "^5.0.0", @@ -23449,8 +22700,8 @@ "style-search": "^0.1.0", "sugarss": "^2.0.0", "svg-tags": "^1.0.0", - "table": "^6.0.7", - "v8-compile-cache": "^2.2.0", + "table": "^6.6.0", + "v8-compile-cache": "^2.3.0", "write-file-atomic": "^3.0.3" }, "dependencies": { @@ -23463,6 +22714,12 @@ "color-convert": "^1.9.0" } }, + "balanced-match": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-2.0.0.tgz", + "integrity": "sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==", + "dev": true + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -23531,6 +22788,12 @@ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -23543,19 +22806,19 @@ } }, "stylelint-config-recommended": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-4.0.0.tgz", - "integrity": "sha512-sgna89Ng+25Hr9kmmaIxpGWt2LStVm1xf1807PdcWasiPDaOTkOHRL61sINw0twky7QMzafCGToGDnHT/kTHtQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-5.0.0.tgz", + "integrity": "sha512-c8aubuARSu5A3vEHLBeOSJt1udOdS+1iue7BmJDTSXoCBmfEQmmWX+59vYIj3NQdJBY6a/QRv1ozVFpaB9jaqA==", "dev": true, "requires": {} }, "stylelint-config-standard": { - "version": "21.0.0", - "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-21.0.0.tgz", - "integrity": "sha512-Yf6mx5oYEbQQJxWuW7X3t1gcxqbUx52qC9SMS3saC2ruOVYEyqmr5zSW6k3wXflDjjFrPhar3kp68ugRopmlzg==", + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-22.0.0.tgz", + "integrity": "sha512-uQVNi87SHjqTm8+4NIP5NMAyY/arXrBgimaaT7skvRfE9u3JKXRK9KBkbr4pVmeciuCcs64kAdjlxfq6Rur7Hw==", "dev": true, "requires": { - "stylelint-config-recommended": "^4.0.0" + "stylelint-config-recommended": "^5.0.0" } }, "stylis": { @@ -23635,6 +22898,12 @@ "supports-color": "^6.1.0" } }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + 
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, "supports-color": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", @@ -23679,6 +22948,7 @@ "version": "2.3.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.3.0.tgz", "integrity": "sha512-fz4IKjNO6HDPgIQxu4IxwtubtbSfGEAJUq/IXyTPIkGhWck/faiiwfkvsB8LnBkKLvSoyNNIY6d13lZprJMc9Q==", + "dev": true, "requires": { "@trysound/sax": "0.1.1", "chalk": "^4.1.0", @@ -23692,14 +22962,15 @@ "commander": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true } } }, "swagger-ui-dist": { - "version": "3.47.1", - "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-3.47.1.tgz", - "integrity": "sha512-7b9iHDC/GGC9SJLd3HiV/3EnsJ3wu7xN8Q4MpOPfQO8UG7TQFG2TMTDkvvy0SNeqxQY0tGQY0ppZC9a95tW3kg==" + "version": "3.48.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-3.48.0.tgz", + "integrity": "sha512-UgpKIQW5RAb4nYRG8B615blmQzct0DNuvtX4904Fe2aMWAVfWeKHKl4kwzFXuBJgr2WYWTwM1PnhZ+qqkLrpPg==" }, "symbol-tree": { "version": "3.2.4", @@ -23708,26 +22979,23 @@ "dev": true }, "table": { - "version": "6.3.2", - "resolved": "https://registry.npmjs.org/table/-/table-6.3.2.tgz", - "integrity": "sha512-I9/Ca6Huf2oxFag7crD0DhA+arIdfLtWunSn0NIXSzjtUlDgIBGVZY7SsMkNPNT3Psd/z4gza0nuEpmra9eRbg==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.7.0.tgz", + "integrity": "sha512-SAM+5p6V99gYiiy2gT5ArdzgM1dLDed0nkrWmG6Fry/bUS/m9x83BwpJUOf1Qj/x2qJd+thL6IkIx7qPGRxqBw==", "dev": true, "requires": { "ajv": "^8.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", "lodash.clonedeep": "^4.5.0", - "lodash.flatten": "^4.4.0", "lodash.truncate": "^4.4.2", "slice-ansi": "^4.0.0", - "string-width": "^4.2.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0" }, "dependencies": { "ajv": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.1.0.tgz", - "integrity": "sha512-B/Sk2Ix7A36fs/ZkuGLIR86EdjbgR6fsAcbx9lOP/QBSXujDNbVmIS/U4Itz5k8fPFDeVZl/zQ/gJW4Jrq6XjQ==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.3.0.tgz", + "integrity": "sha512-RYE7B5An83d7eWnDR8kbdaIFqmKCNsP16ay1hDbJEU+sa0e3H9SebskCt0Uufem6cfAVu7Col6ubcn/W+Sm8/Q==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", @@ -23781,25 +23049,45 @@ "commander": "^2.20.0", "source-map": "~0.6.1", "source-map-support": "~0.5.12" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, "terser-webpack-plugin": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.1.1.tgz", - "integrity": "sha512-5XNNXZiR8YO6X6KhSGXfY0QrGrCRlSwAEjIIrlRQR4W8nP69TaJUlh3bkuac6zzgspiGPfKEHcY295MMVExl5Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.1.2.tgz", + "integrity": 
"sha512-6QhDaAiVHIQr5Ab3XUWZyDmrIPCHMiqJVljMF91YKyqwKkL5QHnYMkrMBy96v9Z7ev1hGhSEw1HQZc2p/s5Z8Q==", "requires": { "jest-worker": "^26.6.2", "p-limit": "^3.1.0", "schema-utils": "^3.0.0", "serialize-javascript": "^5.0.1", "source-map": "^0.6.1", - "terser": "^5.5.1" + "terser": "^5.7.0" }, "dependencies": { + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, "terser": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.6.1.tgz", - "integrity": "sha512-yv9YLFQQ+3ZqgWCUk+pvNJwgUTdlIxUk1WTN+RnaFJe2L7ipG2csPT0ra2XRm7Cs8cxN7QXmK1rFzEwYEQkzXw==", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.0.tgz", + "integrity": "sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==", "requires": { "commander": "^2.20.0", "source-map": "~0.7.2", @@ -23838,16 +23126,6 @@ "integrity": "sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==", "dev": true }, - "timsort": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", - "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=" - }, - "tiny-emitter": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz", - "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==" - }, "tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -24029,9 +23307,9 @@ "dev": true }, "type-fest": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.0.2.tgz", - "integrity": "sha512-a720oz3Kjbp3ll0zkeN9qjRhO7I34MKMhPGQiQJAmaZQZQ1lo+NWThK322f7sXV+kTg9B1Ybt16KgBXWgteT8w==" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.1.1.tgz", + "integrity": "sha512-RPDKc5KrIyKTP7Fk75LruUagqG6b+OTgXlCR2Z0aQDJFeIvL4/mhahSEtHmmVzXu4gmA0srkF/8FCH3WOWxTWA==" }, "typedarray-to-buffer": { "version": "3.1.5", @@ -24048,9 +23326,9 @@ "integrity": "sha512-dELuLBVa2jvWdU/CHTKi2L/POYaRupv942k+vRsFXsM17acXesQGAiGCio82RW7fvcr7bkuD/Zj8XpUh6aPC2A==" }, "uglify-js": { - "version": "3.13.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.4.tgz", - "integrity": "sha512-kv7fCkIXyQIilD5/yQy8O+uagsYIOt5cZvs890W40/e/rvjMSzJw81o9Bg0tkURxzZBROtDQhW2LFjOGoK3RZw==" + "version": "3.13.6", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.6.tgz", + "integrity": "sha512-rRprLwl8RVaS+Qvx3Wh5hPfPBn9++G6xkGlUupya0s5aDmNjI7z3lnRLB3u7sN4OmbB0pWgzhM9BEJyiWAwtAA==" }, "unbox-primitive": { "version": "1.0.1", @@ -24106,16 +23384,6 @@ } } }, - "uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" - }, - "uniqs": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz", - "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=" - }, "unist-util-find-all-after": { "version": "3.0.2", "resolved": 
"https://registry.npmjs.org/unist-util-find-all-after/-/unist-util-find-all-after-3.0.2.tgz", @@ -24187,9 +23455,9 @@ } }, "updates": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/updates/-/updates-12.0.2.tgz", - "integrity": "sha512-Y5Gnb5MCW9tzjee5on+t4rLsM0gT13ZZ/Iy4wQFO4DdLh8p/XBQuStd4TSUB64kMfYXUIN+vb7wHs7w16KiNXw==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/updates/-/updates-12.1.0.tgz", + "integrity": "sha512-cC/jeGLoeMiu0NteTQsFZTQ9p1aLYs9uODV3HbS3Zx7fAk+dY0GsrUCC8C153szTH3X9NkPtYp0FpLLS2qIKMw==", "dev": true }, "upper-case": { @@ -24246,9 +23514,9 @@ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" }, "v8-to-istanbul": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.1.1.tgz", - "integrity": "sha512-p0BB09E5FRjx0ELN6RgusIPsSPhtgexSRcKETybEs6IGOTXJSZqfwxp7r//55nnu0f1AxltY5VvdVqy2vZf9AA==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz", + "integrity": "sha512-TxNb7YEUwkLXCQYeudi6lgQ/SZrzNO4kMdlqVxaZPUIUjCv6iSSypUQX70kNBSERpQ8fk48+d61FXk+tgqcWow==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.1", @@ -24274,11 +23542,6 @@ "spdx-expression-parse": "^3.0.0" } }, - "vendors": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.4.tgz", - "integrity": "sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==" - }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", @@ -24318,11 +23581,10 @@ "integrity": "sha512-uhmLFETqPPNyuLLbsKz6ioJ4q7AZHzD8ZVFNATNyICSZouqP2Sz0rotWQC8UNBF6VGSCs5abnKJoStA6JbCbfg==" }, "vue-bar-graph": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vue-bar-graph/-/vue-bar-graph-1.2.0.tgz", - "integrity": "sha512-vqe2KZPlm0JpKMxDlgTwGDYnLvN32dSLrGm4EC2ivuiZ2FJ8T8/mqi67XEeM0zkFKi9jmp7U09zKCYkg1ag+WQ==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/vue-bar-graph/-/vue-bar-graph-1.2.2.tgz", + "integrity": "sha512-XasqdeZeFAio2CgK04G3cHqY14Ogh/QibPX/uX1umkxLmguIKpSnU1cy8R8XQwmbIrS4c2v6Kp5c/A+QrBUfhg==", "requires": { - "core-js": "^3.6.1", "gsap": "^3.0.4", "vue": "^2.6.11" } @@ -24374,9 +23636,9 @@ "integrity": "sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==" }, "vue-loader": { - "version": "15.9.6", - "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.9.6.tgz", - "integrity": "sha512-j0cqiLzwbeImIC6nVIby2o/ABAWhlppyL/m5oJ67R5MloP0hj/DtFgb0Zmq3J9CG7AJ+AXIvHVnJAPBvrLyuDg==", + "version": "15.9.7", + "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.9.7.tgz", + "integrity": "sha512-qzlsbLV1HKEMf19IqCJqdNvFJRCI58WNbS6XbPqK13MrLz65es75w392MSQ5TsARAfIjUw+ATm3vlCXUJSOH9Q==", "requires": { "@vue/component-compiler-utils": "^3.1.0", "hash-sum": "^1.0.2", @@ -24499,16 +23761,16 @@ "dev": true }, "webpack": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.35.0.tgz", - "integrity": "sha512-au3gu55yYF/h6NXFr0KZPZAYxS6Nlc595BzYPke8n0CSff5WXcoixtjh5LC/8mXunkRKxhymhXmBY0+kEbR6jg==", + "version": "5.37.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.37.0.tgz", + "integrity": "sha512-yvdhgcI6QkQkDe1hINBAJ1UNevqNGTVaCkD2SSJcB8rcrNNl922RI8i2DXUAuNfANoxwsiXXEA4ZPZI9q2oGLA==", "requires": { "@types/eslint-scope": "^3.7.0", "@types/estree": "^0.0.47", 
"@webassemblyjs/ast": "1.11.0", "@webassemblyjs/wasm-edit": "1.11.0", "@webassemblyjs/wasm-parser": "1.11.0", - "acorn": "^8.0.4", + "acorn": "^8.2.1", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.8.0", @@ -24529,24 +23791,23 @@ }, "dependencies": { "acorn": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.1.tgz", - "integrity": "sha512-xYiIVjNuqtKXMxlRMDc6mZUhXehod4a3gbZ1qRlM7icK4EbxUFNLhWoPblCvFtB2Y9CIqHP3CF/rdxLItaQv8g==" + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.2.4.tgz", + "integrity": "sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg==" } } }, "webpack-cli": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.6.0.tgz", - "integrity": "sha512-9YV+qTcGMjQFiY7Nb1kmnupvb1x40lfpj8pwdO/bom+sQiP4OBMKjHq29YQrlDWDPZO9r/qWaRRywKaRDKqBTA==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.7.0.tgz", + "integrity": "sha512-7bKr9182/sGfjFm+xdZSwgQuFjgEcy0iCTIBxRUeteJ2Kr8/Wz0qNJX+jw60LU36jApt4nmMkep6+W5AKhok6g==", "requires": { "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^1.0.2", - "@webpack-cli/info": "^1.2.3", - "@webpack-cli/serve": "^1.3.1", + "@webpack-cli/configtest": "^1.0.3", + "@webpack-cli/info": "^1.2.4", + "@webpack-cli/serve": "^1.4.0", "colorette": "^1.2.1", "commander": "^7.0.0", - "enquirer": "^2.3.6", "execa": "^5.0.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", @@ -24605,6 +23866,13 @@ "requires": { "source-list-map": "^2.0.1", "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } } }, "whatwg-encoding": { @@ -24801,7 +24069,8 @@ "yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true }, "yargs": { "version": "15.4.1", diff --git a/package.json b/package.json index b23e27309f35..bbd565803181 100644 --- a/package.json +++ b/package.json @@ -9,13 +9,11 @@ "@claviska/jquery-minicolors": "2.3.5", "@primer/octicons": "13.0.0", "add-asset-webpack-plugin": "2.0.1", - "clipboard": "2.0.8", "codemirror": "5.61.0", "css-loader": "5.2.4", - "css-minimizer-webpack-plugin": "2.0.0", "dropzone": "5.9.2", - "easymde": "2.14.0", - "esbuild-loader": "2.12.0", + "easymde": "2.15.0", + "esbuild-loader": "2.13.0", "escape-goat": "4.0.0", "fast-glob": "3.2.5", "font-awesome": "4.7.0", @@ -24,22 +22,21 @@ "less": "4.1.1", "less-loader": "8.1.1", "license-checker-webpack-plugin": "0.2.1", - "mermaid": "8.9.2", - "mini-css-extract-plugin": "1.5.0", - "monaco-editor": "0.23.0", - "monaco-editor-webpack-plugin": "3.0.1", - "postcss": "8.2.12", + "mermaid": "8.10.1", + "mini-css-extract-plugin": "1.6.0", + "monaco-editor": "0.24.0", + "monaco-editor-webpack-plugin": "3.1.0", "pretty-ms": "7.0.1", "sortablejs": "1.13.0", - "swagger-ui-dist": "3.47.1", + "swagger-ui-dist": "3.48.0", "tributejs": "5.1.3", "vue": "2.6.12", - "vue-bar-graph": "1.2.0", + "vue-bar-graph": "1.2.2", "vue-calendar-heatmap": "0.8.4", - "vue-loader": "15.9.6", 
+ "vue-loader": "15.9.7", "vue-template-compiler": "2.6.12", - "webpack": "5.35.0", - "webpack-cli": "4.6.0", + "webpack": "5.37.0", + "webpack-cli": "4.7.0", "workbox-routing": "6.1.5", "workbox-strategies": "6.1.5", "worker-loader": "3.0.8", @@ -47,17 +44,17 @@ }, "devDependencies": { "editorconfig-checker": "4.0.2", - "eslint": "7.24.0", + "eslint": "7.26.0", "eslint-plugin-html": "6.1.2", "eslint-plugin-import": "2.22.1", - "eslint-plugin-unicorn": "30.0.0", + "eslint-plugin-unicorn": "32.0.1", "eslint-plugin-vue": "7.9.0", "jest": "26.6.3", "jest-extended": "0.11.5", - "stylelint": "13.12.0", - "stylelint-config-standard": "21.0.0", + "stylelint": "13.13.1", + "stylelint-config-standard": "22.0.0", "svgo": "2.3.0", - "updates": "12.0.2" + "updates": "12.1.0" }, "browserslist": [ "defaults", diff --git a/public/img/emoji/codeberg.png b/public/img/emoji/codeberg.png new file mode 100644 index 000000000000..b91613833a2f Binary files /dev/null and b/public/img/emoji/codeberg.png differ diff --git a/public/img/emoji/git.png b/public/img/emoji/git.png new file mode 100644 index 000000000000..00a6bcfca4a2 Binary files /dev/null and b/public/img/emoji/git.png differ diff --git a/public/img/emoji/github.png b/public/img/emoji/github.png new file mode 100644 index 000000000000..88efc353e776 Binary files /dev/null and b/public/img/emoji/github.png differ diff --git a/public/img/emoji/gitlab.png b/public/img/emoji/gitlab.png new file mode 100644 index 000000000000..55a0d2b70b9e Binary files /dev/null and b/public/img/emoji/gitlab.png differ diff --git a/public/img/emoji/gogs.png b/public/img/emoji/gogs.png new file mode 100644 index 000000000000..6471a84dadbb Binary files /dev/null and b/public/img/emoji/gogs.png differ diff --git a/public/img/svg/fontawesome-openid.svg b/public/img/svg/fontawesome-openid.svg new file mode 100644 index 000000000000..9bcb92b877ab --- /dev/null +++ b/public/img/svg/fontawesome-openid.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/fontawesome-windows.svg b/public/img/svg/fontawesome-windows.svg new file mode 100644 index 000000000000..8e322daf0e6e --- /dev/null +++ b/public/img/svg/fontawesome-windows.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/routers/api/v1/admin/user.go b/routers/api/v1/admin/user.go index 2d4a3815f4ca..6bc9b849b1fc 100644 --- a/routers/api/v1/admin/user.go +++ b/routers/api/v1/admin/user.go @@ -66,6 +66,7 @@ func CreateUser(ctx *context.APIContext) { // "422": // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.CreateUserOption) + u := &models.User{ Name: form.Username, FullName: form.FullName, @@ -88,7 +89,7 @@ func CreateUser(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "PasswordComplexity", err) return } - pwned, err := password.IsPwned(ctx.Req.Context(), form.Password) + pwned, err := password.IsPwned(ctx, form.Password) if pwned { if err != nil { log.Error(err.Error()) @@ -97,7 +98,15 @@ func CreateUser(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "PasswordPwned", errors.New("PasswordPwned")) return } - if err := models.CreateUser(u); err != nil { + + var overwriteDefault *models.CreateUserOverwriteOptions + if form.Visibility != "" { + overwriteDefault = &models.CreateUserOverwriteOptions{ + Visibility: api.VisibilityModes[form.Visibility], + } + } + + if err := models.CreateUser(u, overwriteDefault); err != nil { if models.IsErrUserAlreadyExist(err) || models.IsErrEmailAlreadyUsed(err) || models.IsErrNameReserved(err) || @@ -162,7 +171,7 @@ 
func EditUser(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "PasswordComplexity", err) return } - pwned, err := password.IsPwned(ctx.Req.Context(), form.Password) + pwned, err := password.IsPwned(ctx, form.Password) if pwned { if err != nil { log.Error(err.Error()) @@ -209,6 +218,9 @@ func EditUser(ctx *context.APIContext) { if form.Active != nil { u.IsActive = *form.Active } + if len(form.Visibility) != 0 { + u.Visibility = api.VisibilityModes[form.Visibility] + } if form.Admin != nil { u.IsAdmin = *form.Admin } @@ -395,6 +407,7 @@ func GetAllUsers(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) users, maxResults, err := models.SearchUsers(&models.SearchUserOptions{ + Actor: ctx.User, Type: models.UserTypeIndividual, OrderBy: models.SearchOrderByAlphabetically, ListOptions: listOptions, diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index ecb3a3f03dfe..4258ea5dc300 100644 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -83,6 +83,7 @@ import ( "code.gitea.io/gitea/routers/api/v1/settings" _ "code.gitea.io/gitea/routers/api/v1/swagger" // for swagger generation "code.gitea.io/gitea/routers/api/v1/user" + "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/forms" "gitea.com/go-chi/binding" @@ -557,6 +558,7 @@ func Routes() *web.Route { Gclifetime: setting.SessionConfig.Gclifetime, Maxlifetime: setting.SessionConfig.Maxlifetime, Secure: setting.SessionConfig.Secure, + SameSite: setting.SessionConfig.SameSite, Domain: setting.SessionConfig.Domain, })) m.Use(securityHeaders()) @@ -572,9 +574,8 @@ func Routes() *web.Route { } m.Use(context.APIContexter()) - if setting.EnableAccessLog { - m.Use(context.AccessLogger()) - } + // Get user from session if logged in. + m.Use(context.APIAuth(auth.NewGroup(auth.Methods()...))) m.Use(context.ToggleAPI(&context.ToggleOptions{ SignInRequired: setting.Service.RequireSignInView, @@ -648,6 +649,10 @@ func Routes() *web.Route { m.Group("/user", func() { m.Get("", user.GetAuthenticatedUser) + m.Group("/settings", func() { + m.Get("", user.GetUserSettings) + m.Patch("", bind(api.UserSettingsOptions{}), user.UpdateUserSettings) + }, reqToken()) m.Combo("/emails").Get(user.ListEmails). Post(bind(api.CreateEmailOption{}), user.AddEmail). Delete(bind(api.DeleteEmailOption{}), user.DeleteEmail) @@ -681,6 +686,9 @@ func Routes() *web.Route { Delete(user.DeleteGPGKey) }) + m.Get("/gpg_key_token", user.GetVerificationToken) + m.Post("/gpg_key_verify", bind(api.VerifyGPGKeyOption{}), user.VerifyUserGPGKey) + m.Combo("/repos").Get(user.ListMyRepos). Post(bind(api.CreateRepoOption{}), repo.Create) @@ -716,7 +724,8 @@ func Routes() *web.Route { m.Group("/{username}/{reponame}", func() { m.Combo("").Get(reqAnyRepoReader(), repo.Get). Delete(reqToken(), reqOwner(), repo.Delete). - Patch(reqToken(), reqAdmin(), context.RepoRefForAPI, bind(api.EditRepoOption{}), repo.Edit) + Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), repo.Edit) + m.Post("/generate", reqToken(), reqRepoReader(models.UnitTypeCode), bind(api.GenerateRepoOption{}), repo.Generate) m.Post("/transfer", reqOwner(), bind(api.TransferRepoOption{}), repo.Transfer) m.Combo("/notifications"). Get(reqToken(), notify.ListRepoNotifications). @@ -745,6 +754,8 @@ func Routes() *web.Route { Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator). 
Delete(reqAdmin(), repo.DeleteCollaborator) }, reqToken()) + m.Get("/assignees", reqToken(), reqAnyRepoReader(), repo.GetAssignees) + m.Get("/reviewers", reqToken(), reqAnyRepoReader(), repo.GetReviewers) m.Group("/teams", func() { m.Get("", reqAnyRepoReader(), repo.ListTeams) m.Combo("/{team}").Get(reqAnyRepoReader(), repo.IsTeam). @@ -772,7 +783,9 @@ func Routes() *web.Route { }, reqToken(), reqAdmin()) m.Group("/tags", func() { m.Get("", repo.ListTags) - m.Delete("/{tag}", repo.DeleteTag) + m.Get("/*", repo.GetTag) + m.Post("", reqRepoWriter(models.UnitTypeCode), bind(api.CreateTagOption{}), repo.CreateTag) + m.Delete("/*", repo.DeleteTag) }, reqRepoReader(models.UnitTypeCode), context.ReferencesGitRepo(true)) m.Group("/keys", func() { m.Combo("").Get(repo.ListDeployKeys). @@ -892,10 +905,11 @@ func Routes() *web.Route { Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest) m.Group("/{index}", func() { m.Combo("").Get(repo.GetPullRequest). - Patch(reqToken(), reqRepoWriter(models.UnitTypePullRequests), bind(api.EditPullRequestOption{}), repo.EditPullRequest) + Patch(reqToken(), bind(api.EditPullRequestOption{}), repo.EditPullRequest) m.Get(".diff", repo.DownloadPullDiff) m.Get(".patch", repo.DownloadPullPatch) m.Post("/update", reqToken(), repo.UpdatePullRequest) + m.Get("/commits", repo.GetPullRequestCommits) m.Combo("/merge").Get(repo.IsPullRequestMerged). Post(reqToken(), mustNotBeArchived, bind(forms.MergePullRequestForm{}), repo.MergePullRequest) m.Group("/reviews", func() { @@ -937,7 +951,7 @@ func Routes() *web.Route { m.Get("/refs/*", repo.GetGitRefs) m.Get("/trees/{sha}", context.RepoRefForAPI, repo.GetTree) m.Get("/blobs/{sha}", context.RepoRefForAPI, repo.GetBlob) - m.Get("/tags/{sha}", context.RepoRefForAPI, repo.GetTag) + m.Get("/tags/{sha}", context.RepoRefForAPI, repo.GetAnnotatedTag) }, reqRepoReader(models.UnitTypeCode)) m.Group("/contents", func() { m.Get("", repo.GetContentsList) @@ -985,10 +999,10 @@ func Routes() *web.Route { Delete(reqToken(), reqOrgMembership(), org.ConcealMember) }) m.Group("/teams", func() { - m.Combo("", reqToken()).Get(org.ListTeams). 
- Post(reqOrgOwnership(), bind(api.CreateTeamOption{}), org.CreateTeam) + m.Get("", org.ListTeams) + m.Post("", reqOrgOwnership(), bind(api.CreateTeamOption{}), org.CreateTeam) m.Get("/search", org.SearchTeam) - }, reqOrgMembership()) + }, reqToken(), reqOrgMembership()) m.Group("/labels", func() { m.Get("", org.ListLabels) m.Post("", reqToken(), reqOrgOwnership(), bind(api.CreateLabelOption{}), org.CreateLabel) diff --git a/routers/api/v1/notify/notifications.go b/routers/api/v1/notify/notifications.go index 71dd7d949267..a5e095a3b5c2 100644 --- a/routers/api/v1/notify/notifications.go +++ b/routers/api/v1/notify/notifications.go @@ -6,10 +6,12 @@ package notify import ( "net/http" + "strings" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/routers/api/v1/utils" ) // NewAvailable check if unread notifications exist @@ -22,3 +24,44 @@ func NewAvailable(ctx *context.APIContext) { // "$ref": "#/responses/NotificationCount" ctx.JSON(http.StatusOK, api.NotificationCount{New: models.CountUnread(ctx.User)}) } + +func getFindNotificationOptions(ctx *context.APIContext) *models.FindNotificationOptions { + before, since, err := utils.GetQueryBeforeSince(ctx) + if err != nil { + ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) + return nil + } + opts := &models.FindNotificationOptions{ + ListOptions: utils.GetListOptions(ctx), + UserID: ctx.User.ID, + UpdatedBeforeUnix: before, + UpdatedAfterUnix: since, + } + if !ctx.QueryBool("all") { + statuses := ctx.QueryStrings("status-types") + opts.Status = statusStringsToNotificationStatuses(statuses, []string{"unread", "pinned"}) + } + + subjectTypes := ctx.QueryStrings("subject-type") + if len(subjectTypes) != 0 { + opts.Source = subjectToSource(subjectTypes) + } + + return opts +} + +func subjectToSource(value []string) (result []models.NotificationSource) { + for _, v := range value { + switch strings.ToLower(v) { + case "issue": + result = append(result, models.NotificationSourceIssue) + case "pull": + result = append(result, models.NotificationSourcePullRequest) + case "commit": + result = append(result, models.NotificationSourceCommit) + case "repository": + result = append(result, models.NotificationSourceRepository) + } + } + return +} diff --git a/routers/api/v1/notify/repo.go b/routers/api/v1/notify/repo.go index 0a75fcd30a1d..af55d1d49c05 100644 --- a/routers/api/v1/notify/repo.go +++ b/routers/api/v1/notify/repo.go @@ -13,7 +13,6 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/routers/api/v1/utils" ) func statusStringToNotificationStatus(status string) models.NotificationStatus { @@ -66,8 +65,7 @@ func ListRepoNotifications(ctx *context.APIContext) { // - name: all // in: query // description: If true, show notifications marked as read. Default value is false - // type: string - // required: false + // type: boolean // - name: status-types // in: query // description: "Show notifications with the provided status types. Options are: unread, read and/or pinned. 
Defaults to unread & pinned" @@ -75,19 +73,24 @@ func ListRepoNotifications(ctx *context.APIContext) { // collectionFormat: multi // items: // type: string - // required: false + // - name: subject-type + // in: query + // description: "filter notifications by subject type" + // type: array + // collectionFormat: multi + // items: + // type: string + // enum: [issue,pull,commit,repository] // - name: since // in: query // description: Only show notifications updated after the given time. This is a timestamp in RFC 3339 format // type: string // format: date-time - // required: false // - name: before // in: query // description: Only show notifications updated before the given time. This is a timestamp in RFC 3339 format // type: string // format: date-time - // required: false // - name: page // in: query // description: page number of results to return (1-based) @@ -99,24 +102,12 @@ func ListRepoNotifications(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/NotificationThreadList" - - before, since, err := utils.GetQueryBeforeSince(ctx) - if err != nil { - ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) + opts := getFindNotificationOptions(ctx) + if ctx.Written() { return } - opts := models.FindNotificationOptions{ - ListOptions: utils.GetListOptions(ctx), - UserID: ctx.User.ID, - RepoID: ctx.Repo.Repository.ID, - UpdatedBeforeUnix: before, - UpdatedAfterUnix: since, - } + opts.RepoID = ctx.Repo.Repository.ID - if !ctx.QueryBool("all") { - statuses := ctx.QueryStrings("status-types") - opts.Status = statusStringsToNotificationStatuses(statuses, []string{"unread", "pinned"}) - } nl, err := models.GetNotifications(opts) if err != nil { ctx.InternalServerError(err) @@ -192,7 +183,7 @@ func ReadRepoNotifications(ctx *context.APIContext) { } } - opts := models.FindNotificationOptions{ + opts := &models.FindNotificationOptions{ UserID: ctx.User.ID, RepoID: ctx.Repo.Repository.ID, UpdatedBeforeUnix: lastRead, diff --git a/routers/api/v1/notify/user.go b/routers/api/v1/notify/user.go index e739c6a38dd8..475a541bdc60 100644 --- a/routers/api/v1/notify/user.go +++ b/routers/api/v1/notify/user.go @@ -12,7 +12,6 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" - "code.gitea.io/gitea/routers/api/v1/utils" ) // ListNotifications list users's notification threads @@ -28,8 +27,7 @@ func ListNotifications(ctx *context.APIContext) { // - name: all // in: query // description: If true, show notifications marked as read. Default value is false - // type: string - // required: false + // type: boolean // - name: status-types // in: query // description: "Show notifications with the provided status types. Options are: unread, read and/or pinned. Defaults to unread & pinned." @@ -37,19 +35,24 @@ func ListNotifications(ctx *context.APIContext) { // collectionFormat: multi // items: // type: string - // required: false + // - name: subject-type + // in: query + // description: "filter notifications by subject type" + // type: array + // collectionFormat: multi + // items: + // type: string + // enum: [issue,pull,commit,repository] // - name: since // in: query // description: Only show notifications updated after the given time. This is a timestamp in RFC 3339 format // type: string // format: date-time - // required: false // - name: before // in: query // description: Only show notifications updated before the given time. 
This is a timestamp in RFC 3339 format // type: string // format: date-time - // required: false // - name: page // in: query // description: page number of results to return (1-based) @@ -61,22 +64,11 @@ func ListNotifications(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/NotificationThreadList" - - before, since, err := utils.GetQueryBeforeSince(ctx) - if err != nil { - ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) + opts := getFindNotificationOptions(ctx) + if ctx.Written() { return } - opts := models.FindNotificationOptions{ - ListOptions: utils.GetListOptions(ctx), - UserID: ctx.User.ID, - UpdatedBeforeUnix: before, - UpdatedAfterUnix: since, - } - if !ctx.QueryBool("all") { - statuses := ctx.QueryStrings("status-types") - opts.Status = statusStringsToNotificationStatuses(statuses, []string{"unread", "pinned"}) - } + nl, err := models.GetNotifications(opts) if err != nil { ctx.InternalServerError(err) @@ -141,7 +133,7 @@ func ReadNotifications(ctx *context.APIContext) { lastRead = tmpLastRead.Unix() } } - opts := models.FindNotificationOptions{ + opts := &models.FindNotificationOptions{ UserID: ctx.User.ID, UpdatedBeforeUnix: lastRead, } diff --git a/routers/api/v1/org/org.go b/routers/api/v1/org/org.go index e0f36aa1e657..5c16594f89d1 100644 --- a/routers/api/v1/org/org.go +++ b/routers/api/v1/org/org.go @@ -225,8 +225,8 @@ func Get(ctx *context.APIContext) { // "200": // "$ref": "#/responses/Organization" - if !models.HasOrgVisible(ctx.Org.Organization, ctx.User) { - ctx.NotFound("HasOrgVisible", nil) + if !models.HasOrgOrUserVisible(ctx.Org.Organization, ctx.User) { + ctx.NotFound("HasOrgOrUserVisible", nil) return } ctx.JSON(http.StatusOK, convert.ToOrganization(ctx.Org.Organization)) @@ -264,7 +264,13 @@ func Edit(ctx *context.APIContext) { if form.Visibility != "" { org.Visibility = api.VisibilityModes[form.Visibility] } - if err := models.UpdateUserCols(org, "full_name", "description", "website", "location", "visibility"); err != nil { + if form.RepoAdminChangeTeamAccess != nil { + org.RepoAdminChangeTeamAccess = *form.RepoAdminChangeTeamAccess + } + if err := models.UpdateUserCols(org, + "full_name", "description", "website", "location", + "visibility", "repo_admin_change_team_access", + ); err != nil { ctx.Error(http.StatusInternalServerError, "EditOrganization", err) return } diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go index 451fdcf516f0..85c1681dfec1 100644 --- a/routers/api/v1/repo/branch.go +++ b/routers/api/v1/repo/branch.go @@ -6,6 +6,7 @@ package repo import ( + "errors" "fmt" "net/http" @@ -13,7 +14,6 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" @@ -117,62 +117,20 @@ func DeleteBranch(ctx *context.APIContext) { branchName := ctx.Params("*") - if ctx.Repo.Repository.DefaultBranch == branchName { - ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch")) - return - } - - isProtected, err := ctx.Repo.Repository.IsProtectedBranch(branchName, ctx.User) - if err != nil { - ctx.InternalServerError(err) - return - } - if isProtected { - ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected")) - return - } - - branch, err := repo_module.GetBranch(ctx.Repo.Repository, branchName) - if err != 
nil { - if git.IsErrBranchNotExist(err) { + if err := repo_service.DeleteBranch(ctx.User, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { + switch { + case git.IsErrBranchNotExist(err): ctx.NotFound(err) - } else { - ctx.Error(http.StatusInternalServerError, "GetBranch", err) + case errors.Is(err, repo_service.ErrBranchIsDefault): + ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch")) + case errors.Is(err, repo_service.ErrBranchIsProtected): + ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected")) + default: + ctx.Error(http.StatusInternalServerError, "DeleteBranch", err) } return } - c, err := branch.GetCommit() - if err != nil { - ctx.Error(http.StatusInternalServerError, "GetCommit", err) - return - } - - if err := ctx.Repo.GitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{ - Force: true, - }); err != nil { - ctx.Error(http.StatusInternalServerError, "DeleteBranch", err) - return - } - - // Don't return error below this - if err := repo_service.PushUpdate( - &repo_module.PushUpdateOptions{ - RefFullName: git.BranchPrefix + branchName, - OldCommitID: c.ID.String(), - NewCommitID: git.EmptySHA, - PusherID: ctx.User.ID, - PusherName: ctx.User.Name, - RepoUserName: ctx.Repo.Owner.Name, - RepoName: ctx.Repo.Repository.Name, - }); err != nil { - log.Error("Update: %v", err) - } - - if err := ctx.Repo.Repository.AddDeletedBranch(branchName, c.ID.String(), ctx.User.ID); err != nil { - log.Warn("AddDeletedBranch: %v", err) - } - ctx.Status(http.StatusNoContent) } diff --git a/routers/api/v1/repo/collaborators.go b/routers/api/v1/repo/collaborators.go index d0936019fadd..078af1f6ff8e 100644 --- a/routers/api/v1/repo/collaborators.go +++ b/routers/api/v1/repo/collaborators.go @@ -221,3 +221,63 @@ func DeleteCollaborator(ctx *context.APIContext) { } ctx.Status(http.StatusNoContent) } + +// GetReviewers return all users that can be requested to review in this repo +func GetReviewers(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/reviewers repository repoGetReviewers + // --- + // summary: Return all users that can be requested to review in this repo + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/UserList" + + reviewers, err := ctx.Repo.Repository.GetReviewers(ctx.User.ID, 0) + if err != nil { + ctx.Error(http.StatusInternalServerError, "ListCollaborators", err) + return + } + ctx.JSON(http.StatusOK, convert.ToUsers(ctx.User, reviewers)) +} + +// GetAssignees return all users that have write access and can be assigned to issues +func GetAssignees(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/assignees repository repoGetAssignees + // --- + // summary: Return all users that have write access and can be assigned to issues + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/UserList" + + assignees, err := ctx.Repo.Repository.GetAssignees() + if err != nil { + ctx.Error(http.StatusInternalServerError, 
"ListCollaborators", err) + return + } + ctx.JSON(http.StatusOK, convert.ToUsers(ctx.User, assignees)) +} diff --git a/routers/api/v1/repo/commits.go b/routers/api/v1/repo/commits.go index a16cca0f4e14..9a0fd1d0b6f1 100644 --- a/routers/api/v1/repo/commits.go +++ b/routers/api/v1/repo/commits.go @@ -143,8 +143,8 @@ func GetAllCommits(ctx *context.APIContext) { listOptions.Page = 1 } - if listOptions.PageSize > git.CommitsRangeSize { - listOptions.PageSize = git.CommitsRangeSize + if listOptions.PageSize > setting.Git.CommitsRangeSize { + listOptions.PageSize = setting.Git.CommitsRangeSize } sha := ctx.Query("sha") diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go index 37e02874b4ff..e6427ea4f4d2 100644 --- a/routers/api/v1/repo/file.go +++ b/routers/api/v1/repo/file.go @@ -17,7 +17,8 @@ import ( "code.gitea.io/gitea/modules/repofiles" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" - "code.gitea.io/gitea/routers/repo" + "code.gitea.io/gitea/routers/common" + "code.gitea.io/gitea/routers/web/repo" ) // GetRawFile get a file by path on a repository @@ -83,7 +84,7 @@ func GetRawFile(ctx *context.APIContext) { } return } - if err = repo.ServeBlob(ctx.Context, blob); err != nil { + if err = common.ServeBlob(ctx.Context, blob); err != nil { ctx.Error(http.StatusInternalServerError, "ServeBlob", err) } } diff --git a/routers/api/v1/repo/git_ref.go b/routers/api/v1/repo/git_ref.go index bd43ad4fc8ab..e304e06740be 100644 --- a/routers/api/v1/repo/git_ref.go +++ b/routers/api/v1/repo/git_ref.go @@ -8,8 +8,8 @@ import ( "net/http" "code.gitea.io/gitea/modules/context" - "code.gitea.io/gitea/modules/git" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/routers/api/v1/utils" ) // GetGitAllRefs get ref or an list all the refs of a repository @@ -73,22 +73,8 @@ func GetGitRefs(ctx *context.APIContext) { getGitRefsInternal(ctx, ctx.Params("*")) } -func getGitRefs(ctx *context.APIContext, filter string) ([]*git.Reference, string, error) { - gitRepo, err := git.OpenRepository(ctx.Repo.Repository.RepoPath()) - if err != nil { - return nil, "OpenRepository", err - } - defer gitRepo.Close() - - if len(filter) > 0 { - filter = "refs/" + filter - } - refs, err := gitRepo.GetRefsFiltered(filter) - return refs, "GetRefsFiltered", err -} - func getGitRefsInternal(ctx *context.APIContext, filter string) { - refs, lastMethodName, err := getGitRefs(ctx, filter) + refs, lastMethodName, err := utils.GetGitRefs(ctx, filter) if err != nil { ctx.Error(http.StatusInternalServerError, lastMethodName, err) return diff --git a/routers/api/v1/repo/hook.go b/routers/api/v1/repo/hook.go index 5a0911544a05..da0a2c501c8a 100644 --- a/routers/api/v1/repo/hook.go +++ b/routers/api/v1/repo/hook.go @@ -140,16 +140,17 @@ func TestHook(ctx *context.APIContext) { return } + commit := convert.ToPayloadCommit(ctx.Repo.Repository, ctx.Repo.Commit) + if err := webhook.PrepareWebhook(hook, ctx.Repo.Repository, models.HookEventPush, &api.PushPayload{ - Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch, - Before: ctx.Repo.Commit.ID.String(), - After: ctx.Repo.Commit.ID.String(), - Commits: []*api.PayloadCommit{ - convert.ToPayloadCommit(ctx.Repo.Repository, ctx.Repo.Commit), - }, - Repo: convert.ToRepo(ctx.Repo.Repository, models.AccessModeNone), - Pusher: convert.ToUserWithAccessMode(ctx.User, models.AccessModeNone), - Sender: convert.ToUserWithAccessMode(ctx.User, models.AccessModeNone), + Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch, + Before: 
ctx.Repo.Commit.ID.String(), + After: ctx.Repo.Commit.ID.String(), + Commits: []*api.PayloadCommit{commit}, + HeadCommit: commit, + Repo: convert.ToRepo(ctx.Repo.Repository, models.AccessModeNone), + Pusher: convert.ToUserWithAccessMode(ctx.User, models.AccessModeNone), + Sender: convert.ToUserWithAccessMode(ctx.User, models.AccessModeNone), }); err != nil { ctx.Error(http.StatusInternalServerError, "PrepareWebhook: ", err) return diff --git a/routers/api/v1/repo/issue.go b/routers/api/v1/repo/issue.go index 6b46dc0fef53..5a7d10b36f13 100644 --- a/routers/api/v1/repo/issue.go +++ b/routers/api/v1/repo/issue.go @@ -42,6 +42,10 @@ func SearchIssues(ctx *context.APIContext) { // in: query // description: comma separated list of labels. Fetch only issues that have any of this labels. Non existent labels are discarded // type: string + // - name: milestones + // in: query + // description: comma separated list of milestone names. Fetch only issues that have any of this milestones. Non existent are discarded + // type: string // - name: q // in: query // description: search string @@ -164,6 +168,12 @@ func SearchIssues(ctx *context.APIContext) { includedLabelNames = strings.Split(labels, ",") } + milestones := strings.TrimSpace(ctx.Query("milestones")) + var includedMilestones []string + if len(milestones) > 0 { + includedMilestones = strings.Split(milestones, ",") + } + // this api is also used in UI, // so the default limit is set to fit UI needs limit := ctx.QueryInt("limit") @@ -175,7 +185,7 @@ func SearchIssues(ctx *context.APIContext) { // Only fetch the issues if we either don't have a keyword or the search returned issues // This would otherwise return all issues if no issues were found by the search. - if len(keyword) == 0 || len(issueIDs) > 0 || len(includedLabelNames) > 0 { + if len(keyword) == 0 || len(issueIDs) > 0 || len(includedLabelNames) > 0 || len(includedMilestones) > 0 { issuesOpt := &models.IssuesOptions{ ListOptions: models.ListOptions{ Page: ctx.QueryInt("page"), @@ -185,6 +195,7 @@ func SearchIssues(ctx *context.APIContext) { IsClosed: isClosed, IssueIDs: issueIDs, IncludedLabelNames: includedLabelNames, + IncludeMilestones: includedMilestones, SortType: "priorityrepo", PriorityRepoID: ctx.QueryInt64("priority_repo_id"), IsPull: isPull, @@ -266,6 +277,30 @@ func ListIssues(ctx *context.APIContext) { // in: query // description: comma separated list of milestone names or ids. It uses names and fall back to ids. Fetch only issues that have any of this milestones. Non existent milestones are discarded // type: string + // - name: since + // in: query + // description: Only show notifications updated after the given time. This is a timestamp in RFC 3339 format + // type: string + // format: date-time + // required: false + // - name: before + // in: query + // description: Only show notifications updated before the given time. 
This is a timestamp in RFC 3339 format + // type: string + // format: date-time + // required: false + // - name: created_by + // in: query + // description: filter (issues / pulls) created to + // type: string + // - name: assigned_by + // in: query + // description: filter (issues / pulls) assigned to + // type: string + // - name: mentioned_by + // in: query + // description: filter (issues / pulls) mentioning to + // type: string // - name: page // in: query // description: page number of results to return (1-based) @@ -277,6 +312,11 @@ func ListIssues(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/IssueList" + before, since, err := utils.GetQueryBeforeSince(ctx) + if err != nil { + ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) + return + } var isClosed util.OptionalBool switch ctx.Query("state") { @@ -297,7 +337,6 @@ func ListIssues(ctx *context.APIContext) { } var issueIDs []int64 var labelIDs []int64 - var err error if len(keyword) > 0 { issueIDs, err = issue_indexer.SearchIssuesByKeyword([]int64{ctx.Repo.Repository.ID}, keyword) if err != nil { @@ -356,17 +395,36 @@ func ListIssues(ctx *context.APIContext) { isPull = util.OptionalBoolNone } + // FIXME: we should be more efficient here + createdByID := getUserIDForFilter(ctx, "created_by") + if ctx.Written() { + return + } + assignedByID := getUserIDForFilter(ctx, "assigned_by") + if ctx.Written() { + return + } + mentionedByID := getUserIDForFilter(ctx, "mentioned_by") + if ctx.Written() { + return + } + // Only fetch the issues if we either don't have a keyword or the search returned issues // This would otherwise return all issues if no issues were found by the search. if len(keyword) == 0 || len(issueIDs) > 0 || len(labelIDs) > 0 { issuesOpt := &models.IssuesOptions{ - ListOptions: listOptions, - RepoIDs: []int64{ctx.Repo.Repository.ID}, - IsClosed: isClosed, - IssueIDs: issueIDs, - LabelIDs: labelIDs, - MilestoneIDs: mileIDs, - IsPull: isPull, + ListOptions: listOptions, + RepoIDs: []int64{ctx.Repo.Repository.ID}, + IsClosed: isClosed, + IssueIDs: issueIDs, + LabelIDs: labelIDs, + MilestoneIDs: mileIDs, + IsPull: isPull, + UpdatedBeforeUnix: before, + UpdatedAfterUnix: since, + PosterID: createdByID, + AssigneeID: assignedByID, + MentionedID: mentionedByID, } if issues, err = models.Issues(issuesOpt); err != nil { @@ -389,6 +447,26 @@ func ListIssues(ctx *context.APIContext) { ctx.JSON(http.StatusOK, convert.ToAPIIssueList(issues)) } +func getUserIDForFilter(ctx *context.APIContext, queryName string) int64 { + userName := ctx.Query(queryName) + if len(userName) == 0 { + return 0 + } + + user, err := models.GetUserByName(userName) + if models.IsErrUserNotExist(err) { + ctx.NotFound(err) + return 0 + } + + if err != nil { + ctx.InternalServerError(err) + return 0 + } + + return user.ID +} + // GetIssue get an issue of a repository func GetIssue(ctx *context.APIContext) { // swagger:operation GET /repos/{owner}/{repo}/issues/{index} issue issueGetIssue diff --git a/routers/api/v1/repo/issue_tracked_time.go b/routers/api/v1/repo/issue_tracked_time.go index 642704800b5c..ad774b563bcc 100644 --- a/routers/api/v1/repo/issue_tracked_time.go +++ b/routers/api/v1/repo/issue_tracked_time.go @@ -526,7 +526,7 @@ func ListTrackedTimesByRepository(ctx *context.APIContext) { if opts.UserID == 0 { opts.UserID = ctx.User.ID } else { - ctx.Error(http.StatusForbidden, "", fmt.Errorf("query user not allowed not enouth rights")) + ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not 
allowed; not enough rights")) return } } diff --git a/routers/api/v1/repo/migrate.go b/routers/api/v1/repo/migrate.go index edae358338fc..de33a3645b75 100644 --- a/routers/api/v1/repo/migrate.go +++ b/routers/api/v1/repo/migrate.go @@ -199,7 +199,7 @@ func Migrate(ctx *context.APIContext) { } }() - if _, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.User, repoOwner.Name, opts); err != nil { + if _, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.User, repoOwner.Name, opts, nil); err != nil { handleMigrateError(ctx, repoOwner, remoteAddr, err) return } @@ -231,7 +231,7 @@ func handleMigrateError(ctx *context.APIContext, repoOwner *models.User, remoteA case base.IsErrNotSupported(err): ctx.Error(http.StatusUnprocessableEntity, "", err) default: - err = util.URLSanitizedError(err, remoteAddr) + err = util.NewStringURLSanitizedError(err, remoteAddr, true) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "Bad credentials") || strings.Contains(err.Error(), "could not read Username") { diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go index eff998ee996a..66bcabfd38c4 100644 --- a/routers/api/v1/repo/pull.go +++ b/routers/api/v1/repo/pull.go @@ -5,8 +5,11 @@ package repo import ( + "errors" "fmt" + "math" "net/http" + "strconv" "strings" "time" @@ -23,6 +26,7 @@ import ( "code.gitea.io/gitea/services/forms" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" + repo_service "code.gitea.io/gitea/services/repository" ) // ListPullRequests returns a list of all PRs @@ -876,6 +880,38 @@ func MergePullRequest(ctx *context.APIContext) { } log.Trace("Pull request merged: %d", pr.ID) + + if form.DeleteBranchAfterMerge { + var headRepo *git.Repository + if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil { + headRepo = ctx.Repo.GitRepo + } else { + headRepo, err = git.OpenRepository(pr.HeadRepo.RepoPath()) + if err != nil { + ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) + return + } + defer headRepo.Close() + } + if err := repo_service.DeleteBranch(ctx.User, pr.HeadRepo, headRepo, pr.HeadBranch); err != nil { + switch { + case git.IsErrBranchNotExist(err): + ctx.NotFound(err) + case errors.Is(err, repo_service.ErrBranchIsDefault): + ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch")) + case errors.Is(err, repo_service.ErrBranchIsProtected): + ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected")) + default: + ctx.Error(http.StatusInternalServerError, "DeleteBranch", err) + } + return + } + if err := models.AddDeletePRBranchComment(ctx.User, pr.BaseRepo, pr.Issue.ID, pr.HeadBranch); err != nil { + // Do not fail here as branch has already been deleted + log.Error("DeleteBranch: %v", err) + } + } + ctx.Status(http.StatusOK) } @@ -1101,3 +1137,122 @@ func UpdatePullRequest(ctx *context.APIContext) { ctx.Status(http.StatusOK) } + +// GetPullRequestCommits gets all commits associated with a given PR +func GetPullRequestCommits(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/pulls/{index}/commits repository repoGetPullRequestCommits + // --- + // summary: Get commits for a pull request + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true 
+ // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: index + // in: path + // description: index of the pull request to get + // type: integer + // format: int64 + // required: true + // - name: page + // in: query + // description: page number of results to return (1-based) + // type: integer + // - name: limit + // in: query + // description: page size of results + // type: integer + // responses: + // "200": + // "$ref": "#/responses/CommitList" + // "404": + // "$ref": "#/responses/notFound" + + pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) + if err != nil { + if models.IsErrPullRequestNotExist(err) { + ctx.NotFound() + } else { + ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) + } + return + } + + if err := pr.LoadBaseRepo(); err != nil { + ctx.InternalServerError(err) + return + } + + var prInfo *git.CompareInfo + baseGitRepo, err := git.OpenRepository(pr.BaseRepo.RepoPath()) + if err != nil { + ctx.ServerError("OpenRepository", err) + return + } + defer baseGitRepo.Close() + if pr.HasMerged { + prInfo, err = baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(), pr.MergeBase, pr.GetGitRefName()) + } else { + prInfo, err = baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName()) + } + if err != nil { + ctx.ServerError("GetCompareInfo", err) + return + } + commits := prInfo.Commits + + listOptions := utils.GetListOptions(ctx) + + totalNumberOfCommits := commits.Len() + totalNumberOfPages := int(math.Ceil(float64(totalNumberOfCommits) / float64(listOptions.PageSize))) + + userCache := make(map[string]*models.User) + + start, end := listOptions.GetStartEnd() + + if end > totalNumberOfCommits { + end = totalNumberOfCommits + } + + apiCommits := make([]*api.Commit, end-start) + + i := 0 + addedCommitsCount := 0 + for commitPointer := commits.Front(); commitPointer != nil; commitPointer = commitPointer.Next() { + if i < start { + i++ + continue + } + if i >= end { + break + } + + commit := commitPointer.Value.(*git.Commit) + + // Create json struct + apiCommits[addedCommitsCount], err = convert.ToCommit(ctx.Repo.Repository, commit, userCache) + addedCommitsCount++ + if err != nil { + ctx.ServerError("toCommit", err) + return + } + i++ + } + + ctx.SetLinkHeader(int(totalNumberOfCommits), listOptions.PageSize) + + ctx.Header().Set("X-Page", strconv.Itoa(listOptions.Page)) + ctx.Header().Set("X-PerPage", strconv.Itoa(listOptions.PageSize)) + ctx.Header().Set("X-Total-Count", fmt.Sprintf("%d", totalNumberOfCommits)) + ctx.Header().Set("X-PageCount", strconv.Itoa(totalNumberOfPages)) + ctx.Header().Set("X-HasMore", strconv.FormatBool(listOptions.Page < totalNumberOfPages)) + ctx.JSON(http.StatusOK, &apiCommits) +} diff --git a/routers/api/v1/repo/pull_review.go b/routers/api/v1/repo/pull_review.go index 63179aa9907d..323904f45c0e 100644 --- a/routers/api/v1/repo/pull_review.go +++ b/routers/api/v1/repo/pull_review.go @@ -307,7 +307,7 @@ func CreatePullReview(ctx *context.APIContext) { } // determine review type - reviewType, isWrong := preparePullReviewType(ctx, pr, opts.Event, opts.Body) + reviewType, isWrong := preparePullReviewType(ctx, pr, opts.Event, opts.Body, len(opts.Comments) > 0) if isWrong { return } @@ -359,7 +359,7 @@ func CreatePullReview(ctx *context.APIContext) { } // create review and associate all pending review comments - review, _, err := pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, 
opts.Body, opts.CommitID) + review, _, err := pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, opts.CommitID, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "SubmitReview", err) return @@ -429,7 +429,7 @@ func SubmitPullReview(ctx *context.APIContext) { } // determine review type - reviewType, isWrong := preparePullReviewType(ctx, pr, opts.Event, opts.Body) + reviewType, isWrong := preparePullReviewType(ctx, pr, opts.Event, opts.Body, len(review.Comments) > 0) if isWrong { return } @@ -447,7 +447,7 @@ func SubmitPullReview(ctx *context.APIContext) { } // create review and associate all pending review comments - review, _, err = pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, headCommitID) + review, _, err = pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, headCommitID, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "SubmitReview", err) return @@ -463,12 +463,15 @@ func SubmitPullReview(ctx *context.APIContext) { } // preparePullReviewType return ReviewType and false or nil and true if an error happen -func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, event api.ReviewStateType, body string) (models.ReviewType, bool) { +func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, event api.ReviewStateType, body string, hasComments bool) (models.ReviewType, bool) { if err := pr.LoadIssue(); err != nil { ctx.Error(http.StatusInternalServerError, "LoadIssue", err) return -1, true } + needsBody := true + hasBody := len(strings.TrimSpace(body)) > 0 + var reviewType models.ReviewType switch event { case api.ReviewStateApproved: @@ -478,6 +481,7 @@ func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, even return -1, true } reviewType = models.ReviewTypeApprove + needsBody = false case api.ReviewStateRequestChanges: // can not reject your own PR @@ -489,13 +493,19 @@ func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, even case api.ReviewStateComment: reviewType = models.ReviewTypeComment + needsBody = false + // if there is no body we need to ensure that there are comments + if !hasBody && !hasComments { + ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("review event %s requires a body or a comment", event)) + return -1, true + } default: reviewType = models.ReviewTypePending } - // reject reviews with empty body if not approve type - if reviewType != models.ReviewTypeApprove && len(strings.TrimSpace(body)) == 0 { - ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("review event %s need body", event)) + // reject reviews with empty body if a body is required for this call + if needsBody && !hasBody { + ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("review event %s requires a body", event)) return -1, true } diff --git a/routers/api/v1/repo/release.go b/routers/api/v1/repo/release.go index 327a2d790b6b..1b52de55ffff 100644 --- a/routers/api/v1/repo/release.go +++ b/routers/api/v1/repo/release.go @@ -5,6 +5,7 @@ package repo import ( + "fmt" "net/http" "code.gitea.io/gitea/models" @@ -83,6 +84,14 @@ func ListReleases(ctx *context.APIContext) { // description: name of the repo // type: string // required: true + // - name: draft + // in: query + // description: filter (exclude / include) drafts, if you dont have repo write access none will show + // type: boolean + // - name: pre-release + // in: query + // description: filter (exclude / 
include) pre-releases + // type: boolean // - name: per_page // in: query // description: page size of results, deprecated - use limit @@ -100,15 +109,19 @@ func ListReleases(ctx *context.APIContext) { // "200": // "$ref": "#/responses/ReleaseList" listOptions := utils.GetListOptions(ctx) - if ctx.QueryInt("per_page") != 0 { + if listOptions.PageSize == 0 && ctx.QueryInt("per_page") != 0 { listOptions.PageSize = ctx.QueryInt("per_page") } - releases, err := models.GetReleasesByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{ + opts := models.FindReleasesOptions{ ListOptions: listOptions, IncludeDrafts: ctx.Repo.AccessMode >= models.AccessModeWrite, IncludeTags: false, - }) + IsDraft: ctx.QueryOptionalBool("draft"), + IsPreRelease: ctx.QueryOptionalBool("pre-release"), + } + + releases, err := models.GetReleasesByRepoID(ctx.Repo.Repository.ID, opts) if err != nil { ctx.Error(http.StatusInternalServerError, "GetReleasesByRepoID", err) return @@ -121,6 +134,16 @@ func ListReleases(ctx *context.APIContext) { } rels[i] = convert.ToRelease(release) } + + filteredCount, err := models.CountReleasesByRepoID(ctx.Repo.Repository.ID, opts) + if err != nil { + ctx.InternalServerError(err) + return + } + + ctx.SetLinkHeader(int(filteredCount), listOptions.PageSize) + ctx.Header().Set("X-Total-Count", fmt.Sprint(filteredCount)) + ctx.Header().Set("Access-Control-Expose-Headers", "X-Total-Count, Link") ctx.JSON(http.StatusOK, rels) } diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go index c422feb04327..b671ef2435ae 100644 --- a/routers/api/v1/repo/repo.go +++ b/routers/api/v1/repo/repo.go @@ -307,6 +307,115 @@ func Create(ctx *context.APIContext) { CreateUserRepo(ctx, ctx.User, *opt) } +// Generate Create a repository using a template +func Generate(ctx *context.APIContext) { + // swagger:operation POST /repos/{template_owner}/{template_repo}/generate repository generateRepo + // --- + // summary: Create a repository using a template + // consumes: + // - application/json + // produces: + // - application/json + // parameters: + // - name: template_owner + // in: path + // description: name of the template repository owner + // type: string + // required: true + // - name: template_repo + // in: path + // description: name of the template repository + // type: string + // required: true + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/GenerateRepoOption" + // responses: + // "201": + // "$ref": "#/responses/Repository" + // "403": + // "$ref": "#/responses/forbidden" + // "404": + // "$ref": "#/responses/notFound" + // "409": + // description: The repository with the same name already exists. 
+ // "422": + // "$ref": "#/responses/validationError" + form := web.GetForm(ctx).(*api.GenerateRepoOption) + + if !ctx.Repo.Repository.IsTemplate { + ctx.Error(http.StatusUnprocessableEntity, "", "this is not a template repo") + return + } + + if ctx.User.IsOrganization() { + ctx.Error(http.StatusUnprocessableEntity, "", "not allowed creating repository for organization") + return + } + + opts := models.GenerateRepoOptions{ + Name: form.Name, + Description: form.Description, + Private: form.Private, + GitContent: form.GitContent, + Topics: form.Topics, + GitHooks: form.GitHooks, + Webhooks: form.Webhooks, + Avatar: form.Avatar, + IssueLabels: form.Labels, + } + + if !opts.IsValid() { + ctx.Error(http.StatusUnprocessableEntity, "", "must select at least one template item") + return + } + + ctxUser := ctx.User + var err error + if form.Owner != ctxUser.Name { + ctxUser, err = models.GetOrgByName(form.Owner) + if err != nil { + if models.IsErrOrgNotExist(err) { + ctx.JSON(http.StatusNotFound, map[string]interface{}{ + "error": "request owner `" + form.Name + "` is not exist", + }) + return + } + + ctx.Error(http.StatusInternalServerError, "GetOrgByName", err) + return + } + + if !ctx.User.IsAdmin { + canCreate, err := ctxUser.CanCreateOrgRepo(ctx.User.ID) + if err != nil { + ctx.ServerError("CanCreateOrgRepo", err) + return + } else if !canCreate { + ctx.Error(http.StatusForbidden, "", "Given user is not allowed to create repository in organization.") + return + } + } + } + + repo, err := repo_service.GenerateRepository(ctx.User, ctxUser, ctx.Repo.Repository, opts) + if err != nil { + if models.IsErrRepoAlreadyExist(err) { + ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.") + } else if models.IsErrNameReserved(err) || + models.IsErrNamePatternNotAllowed(err) { + ctx.Error(http.StatusUnprocessableEntity, "", err) + } else { + ctx.Error(http.StatusInternalServerError, "CreateRepository", err) + } + return + } + log.Trace("Repository generated [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name) + + ctx.JSON(http.StatusCreated, convert.ToRepo(repo, models.AccessModeOwner)) +} + // CreateOrgRepoDeprecated create one repository of the organization func CreateOrgRepoDeprecated(ctx *context.APIContext) { // swagger:operation POST /org/{org}/repos organization createOrgRepoDeprecated @@ -375,8 +484,8 @@ func CreateOrgRepo(ctx *context.APIContext) { return } - if !models.HasOrgVisible(org, ctx.User) { - ctx.NotFound("HasOrgVisible", nil) + if !models.HasOrgOrUserVisible(org, ctx.User) { + ctx.NotFound("HasOrgOrUserVisible", nil) return } @@ -511,7 +620,13 @@ func Edit(ctx *context.APIContext) { } } - ctx.JSON(http.StatusOK, convert.ToRepo(ctx.Repo.Repository, ctx.Repo.AccessMode)) + repo, err := models.GetRepositoryByID(ctx.Repo.Repository.ID) + if err != nil { + ctx.InternalServerError(err) + return + } + + ctx.JSON(http.StatusOK, convert.ToRepo(repo, ctx.Repo.AccessMode)) } // updateBasicProperties updates the basic properties of a repo: Name, Description, Website and Visibility @@ -578,7 +693,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err repo.IsTemplate = *opts.Template } - if ctx.Repo.GitRepo == nil { + if ctx.Repo.GitRepo == nil && !repo.IsEmpty { var err error ctx.Repo.GitRepo, err = git.OpenRepository(ctx.Repo.Repository.RepoPath()) if err != nil { @@ -589,13 +704,13 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err } // Default branch only updated if changed and exist or the repository 
is empty - if opts.DefaultBranch != nil && - repo.DefaultBranch != *opts.DefaultBranch && - (ctx.Repo.Repository.IsEmpty || ctx.Repo.GitRepo.IsBranchExist(*opts.DefaultBranch)) { - if err := ctx.Repo.GitRepo.SetDefaultBranch(*opts.DefaultBranch); err != nil { - if !git.IsErrUnsupportedVersion(err) { - ctx.Error(http.StatusInternalServerError, "SetDefaultBranch", err) - return err + if opts.DefaultBranch != nil && repo.DefaultBranch != *opts.DefaultBranch && (repo.IsEmpty || ctx.Repo.GitRepo.IsBranchExist(*opts.DefaultBranch)) { + if !repo.IsEmpty { + if err := ctx.Repo.GitRepo.SetDefaultBranch(*opts.DefaultBranch); err != nil { + if !git.IsErrUnsupportedVersion(err) { + ctx.Error(http.StatusInternalServerError, "SetDefaultBranch", err) + return err + } } } repo.DefaultBranch = *opts.DefaultBranch @@ -724,14 +839,15 @@ func updateRepoUnits(ctx *context.APIContext, opts api.EditRepoOption) error { if err != nil { // Unit type doesn't exist so we make a new config file with default values config = &models.PullRequestsConfig{ - IgnoreWhitespaceConflicts: false, - AllowMerge: true, - AllowRebase: true, - AllowRebaseMerge: true, - AllowSquash: true, - AllowManualMerge: true, - AutodetectManualMerge: false, - DefaultMergeStyle: models.MergeStyleMerge, + IgnoreWhitespaceConflicts: false, + AllowMerge: true, + AllowRebase: true, + AllowRebaseMerge: true, + AllowSquash: true, + AllowManualMerge: true, + AutodetectManualMerge: false, + DefaultDeleteBranchAfterMerge: false, + DefaultMergeStyle: models.MergeStyleMerge, } } else { config = unit.PullRequestsConfig() @@ -758,6 +874,9 @@ func updateRepoUnits(ctx *context.APIContext, opts api.EditRepoOption) error { if opts.AutodetectManualMerge != nil { config.AutodetectManualMerge = *opts.AutodetectManualMerge } + if opts.DefaultDeleteBranchAfterMerge != nil { + config.DefaultDeleteBranchAfterMerge = *opts.DefaultDeleteBranchAfterMerge + } if opts.DefaultMergeStyle != nil { config.DefaultMergeStyle = models.MergeStyle(*opts.DefaultMergeStyle) } @@ -889,6 +1008,10 @@ func Delete(ctx *context.APIContext) { return } + if ctx.Repo.GitRepo != nil { + ctx.Repo.GitRepo.Close() + } + if err := repo_service.DeleteRepository(ctx.User, repo); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteRepository", err) return diff --git a/routers/api/v1/repo/status.go b/routers/api/v1/repo/status.go index 7ab399b57275..95c3f00a72e4 100644 --- a/routers/api/v1/repo/status.go +++ b/routers/api/v1/repo/status.go @@ -171,39 +171,14 @@ func GetCommitStatusesByRef(ctx *context.APIContext) { // "400": // "$ref": "#/responses/error" - filter := ctx.Params("ref") - if len(filter) == 0 { - ctx.Error(http.StatusBadRequest, "ref not given", nil) + filter := utils.ResolveRefOrSha(ctx, ctx.Params("ref")) + if ctx.Written() { return } - for _, reftype := range []string{"heads", "tags"} { //Search branches and tags - refSHA, lastMethodName, err := searchRefCommitByType(ctx, reftype, filter) - if err != nil { - ctx.Error(http.StatusInternalServerError, lastMethodName, err) - return - } - if refSHA != "" { - filter = refSHA - break - } - - } - getCommitStatuses(ctx, filter) //By default filter is maybe the raw SHA } -func searchRefCommitByType(ctx *context.APIContext, refType, filter string) (string, string, error) { - refs, lastMethodName, err := getGitRefs(ctx, refType+"/"+filter) //Search by type - if err != nil { - return "", lastMethodName, err - } - if len(refs) > 0 { - return refs[0].Object.String(), "", nil //Return found SHA - } - return "", "", nil -} - func 
getCommitStatuses(ctx *context.APIContext, sha string) { if len(sha) == 0 { ctx.Error(http.StatusBadRequest, "ref/sha not given", nil) @@ -272,11 +247,11 @@ func GetCombinedCommitStatusByRef(ctx *context.APIContext) { // "400": // "$ref": "#/responses/error" - sha := ctx.Params("ref") - if len(sha) == 0 { - ctx.Error(http.StatusBadRequest, "ref/sha not given", nil) + sha := utils.ResolveRefOrSha(ctx, ctx.Params("ref")) + if ctx.Written() { return } + repo := ctx.Repo.Repository statuses, err := models.GetLatestCommitStatus(repo.ID, sha, utils.GetListOptions(ctx)) diff --git a/routers/api/v1/repo/tag.go b/routers/api/v1/repo/tag.go index ec9b541bd41d..c95fb63f859e 100644 --- a/routers/api/v1/repo/tag.go +++ b/routers/api/v1/repo/tag.go @@ -6,12 +6,14 @@ package repo import ( "errors" + "fmt" "net/http" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/api/v1/utils" releaseservice "code.gitea.io/gitea/services/release" ) @@ -62,9 +64,9 @@ func ListTags(ctx *context.APIContext) { ctx.JSON(http.StatusOK, &apiTags) } -// GetTag get the tag of a repository. -func GetTag(ctx *context.APIContext) { - // swagger:operation GET /repos/{owner}/{repo}/git/tags/{sha} repository GetTag +// GetAnnotatedTag get the tag of a repository. +func GetAnnotatedTag(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/git/tags/{sha} repository GetAnnotatedTag // --- // summary: Gets the tag object of an annotated tag (not lightweight tags) // produces: @@ -98,16 +100,113 @@ func GetTag(ctx *context.APIContext) { } if tag, err := ctx.Repo.GitRepo.GetAnnotatedTag(sha); err != nil { - ctx.Error(http.StatusBadRequest, "GetTag", err) + ctx.Error(http.StatusBadRequest, "GetAnnotatedTag", err) } else { commit, err := tag.Commit() if err != nil { - ctx.Error(http.StatusBadRequest, "GetTag", err) + ctx.Error(http.StatusBadRequest, "GetAnnotatedTag", err) } ctx.JSON(http.StatusOK, convert.ToAnnotatedTag(ctx.Repo.Repository, tag, commit)) } } +// GetTag get the tag of a repository +func GetTag(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/tags/{tag} repository repoGetTag + // --- + // summary: Get the tag of a repository by tag name + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: tag + // in: path + // description: name of tag + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/Tag" + // "404": + // "$ref": "#/responses/notFound" + tagName := ctx.Params("*") + + tag, err := ctx.Repo.GitRepo.GetTag(tagName) + if err != nil { + ctx.NotFound(tagName) + return + } + ctx.JSON(http.StatusOK, convert.ToTag(ctx.Repo.Repository, tag)) +} + +// CreateTag create a new git tag in a repository +func CreateTag(ctx *context.APIContext) { + // swagger:operation POST /repos/{owner}/{repo}/tags repository repoCreateTag + // --- + // summary: Create a new git tag in a repository + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: body + // 
in: body + // schema: + // "$ref": "#/definitions/CreateTagOption" + // responses: + // "200": + // "$ref": "#/responses/Tag" + // "404": + // "$ref": "#/responses/notFound" + // "409": + // "$ref": "#/responses/conflict" + form := web.GetForm(ctx).(*api.CreateTagOption) + + // If target is not provided use default branch + if len(form.Target) == 0 { + form.Target = ctx.Repo.Repository.DefaultBranch + } + + commit, err := ctx.Repo.GitRepo.GetCommit(form.Target) + if err != nil { + ctx.Error(http.StatusNotFound, "target not found", fmt.Errorf("target not found: %v", err)) + return + } + + if err := releaseservice.CreateNewTag(ctx.User, ctx.Repo.Repository, commit.ID.String(), form.TagName, form.Message); err != nil { + if models.IsErrTagAlreadyExists(err) { + ctx.Error(http.StatusConflict, "tag exist", err) + return + } + ctx.InternalServerError(err) + return + } + + tag, err := ctx.Repo.GitRepo.GetTag(form.TagName) + if err != nil { + ctx.InternalServerError(err) + return + } + ctx.JSON(http.StatusCreated, convert.ToTag(ctx.Repo.Repository, tag)) +} + // DeleteTag delete a specific tag of in a repository by name func DeleteTag(ctx *context.APIContext) { // swagger:operation DELETE /repos/{owner}/{repo}/tags/{tag} repository repoDeleteTag @@ -138,8 +237,9 @@ func DeleteTag(ctx *context.APIContext) { // "$ref": "#/responses/notFound" // "409": // "$ref": "#/responses/conflict" + tagName := ctx.Params("*") - tag, err := models.GetRelease(ctx.Repo.Repository.ID, ctx.Params("tag")) + tag, err := models.GetRelease(ctx.Repo.Repository.ID, tagName) if err != nil { if models.IsErrReleaseNotExist(err) { ctx.NotFound() diff --git a/routers/api/v1/settings/settings.go b/routers/api/v1/settings/settings.go index e6417e40748c..ca2d28fb8bf2 100644 --- a/routers/api/v1/settings/settings.go +++ b/routers/api/v1/settings/settings.go @@ -25,6 +25,7 @@ func GetGeneralUISettings(ctx *context.APIContext) { ctx.JSON(http.StatusOK, api.GeneralUISettings{ DefaultTheme: setting.UI.DefaultTheme, AllowedReactions: setting.UI.Reactions, + CustomEmojis: setting.UI.CustomEmojis, }) } diff --git a/routers/api/v1/swagger/options.go b/routers/api/v1/swagger/options.go index dad025710dba..0ae96a920354 100644 --- a/routers/api/v1/swagger/options.go +++ b/routers/api/v1/swagger/options.go @@ -87,6 +87,8 @@ type swaggerParameterBodies struct { TransferRepoOption api.TransferRepoOption // in:body CreateForkOption api.CreateForkOption + // in:body + GenerateRepoOption api.GenerateRepoOption // in:body CreateStatusOption api.CreateStatusOption @@ -158,4 +160,10 @@ type swaggerParameterBodies struct { // in:body PullReviewRequestOptions api.PullReviewRequestOptions + + // in:body + CreateTagOption api.CreateTagOption + + // in:body + UserSettingsOptions api.UserSettingsOptions } diff --git a/routers/api/v1/swagger/user.go b/routers/api/v1/swagger/user.go index a2df40e4cf5a..a4d52012367a 100644 --- a/routers/api/v1/swagger/user.go +++ b/routers/api/v1/swagger/user.go @@ -42,3 +42,10 @@ type swaggerResponseUserHeatmapData struct { // in:body Body []models.UserHeatmapData `json:"body"` } + +// UserSettings +// swagger:response UserSettings +type swaggerResponseUserSettings struct { + // in:body + Body []api.UserSettings `json:"body"` +} diff --git a/routers/api/v1/user/app.go b/routers/api/v1/user/app.go index 7b69c50d7eb7..9f355a828950 100644 --- a/routers/api/v1/user/app.go +++ b/routers/api/v1/user/app.go @@ -166,7 +166,7 @@ func DeleteAccessToken(ctx *context.APIContext) { case 1: tokenID = tokens[0].ID default: - 
ctx.Error(http.StatusUnprocessableEntity, "DeleteAccessTokenByID", fmt.Errorf("multible matches for token name '%s'", token)) + ctx.Error(http.StatusUnprocessableEntity, "DeleteAccessTokenByID", fmt.Errorf("multiple matches for token name '%s'", token)) return } } diff --git a/routers/api/v1/user/gpg_key.go b/routers/api/v1/user/gpg_key.go index 51bcaeacc6c4..ec03e305ba1b 100644 --- a/routers/api/v1/user/gpg_key.go +++ b/routers/api/v1/user/gpg_key.go @@ -5,6 +5,7 @@ package user import ( + "fmt" "net/http" "code.gitea.io/gitea/models" @@ -119,14 +120,84 @@ func GetGPGKey(ctx *context.APIContext) { // CreateUserGPGKey creates new GPG key to given user by ID. func CreateUserGPGKey(ctx *context.APIContext, form api.CreateGPGKeyOption, uid int64) { - keys, err := models.AddGPGKey(uid, form.ArmoredKey) + token := models.VerificationToken(ctx.User, 1) + lastToken := models.VerificationToken(ctx.User, 0) + + keys, err := models.AddGPGKey(uid, form.ArmoredKey, token, form.Signature) + if err != nil && models.IsErrGPGInvalidTokenSignature(err) { + keys, err = models.AddGPGKey(uid, form.ArmoredKey, lastToken, form.Signature) + } if err != nil { - HandleAddGPGKeyError(ctx, err) + HandleAddGPGKeyError(ctx, err, token) return } ctx.JSON(http.StatusCreated, convert.ToGPGKey(keys[0])) } +// GetVerificationToken returns the current token to be signed for this user +func GetVerificationToken(ctx *context.APIContext) { + // swagger:operation GET /user/gpg_key_token user getVerificationToken + // --- + // summary: Get a Token to verify + // produces: + // - text/plain + // parameters: + // responses: + // "200": + // "$ref": "#/responses/string" + // "404": + // "$ref": "#/responses/notFound" + + token := models.VerificationToken(ctx.User, 1) + ctx.PlainText(http.StatusOK, []byte(token)) +} + +// VerifyUserGPGKey creates new GPG key to given user by ID. +func VerifyUserGPGKey(ctx *context.APIContext) { + // swagger:operation POST /user/gpg_key_verify user userVerifyGPGKey + // --- + // summary: Verify a GPG key + // consumes: + // - application/json + // produces: + // - application/json + // responses: + // "201": + // "$ref": "#/responses/GPGKey" + // "404": + // "$ref": "#/responses/notFound" + // "422": + // "$ref": "#/responses/validationError" + + form := web.GetForm(ctx).(*api.VerifyGPGKeyOption) + token := models.VerificationToken(ctx.User, 1) + lastToken := models.VerificationToken(ctx.User, 0) + + _, err := models.VerifyGPGKey(ctx.User.ID, form.KeyID, token, form.Signature) + if err != nil && models.IsErrGPGInvalidTokenSignature(err) { + _, err = models.VerifyGPGKey(ctx.User.ID, form.KeyID, lastToken, form.Signature) + } + + if err != nil { + if models.IsErrGPGInvalidTokenSignature(err) { + ctx.Error(http.StatusUnprocessableEntity, "GPGInvalidSignature", fmt.Sprintf("The provided GPG key, signature and token do not match or token is out of date. 
Provide a valid signature for the token: %s", token)) + return + } + ctx.Error(http.StatusInternalServerError, "VerifyUserGPGKey", err) + } + + key, err := models.GetGPGKeysByKeyID(form.KeyID) + if err != nil { + if models.IsErrGPGKeyNotExist(err) { + ctx.NotFound() + } else { + ctx.Error(http.StatusInternalServerError, "GetGPGKeysByKeyID", err) + } + return + } + ctx.JSON(http.StatusOK, convert.ToGPGKey(key[0])) +} + // swagger:parameters userCurrentPostGPGKey type swaggerUserCurrentPostGPGKey struct { // in:body @@ -189,7 +260,7 @@ func DeleteGPGKey(ctx *context.APIContext) { } // HandleAddGPGKeyError handle add GPGKey error -func HandleAddGPGKeyError(ctx *context.APIContext, err error) { +func HandleAddGPGKeyError(ctx *context.APIContext, err error, token string) { switch { case models.IsErrGPGKeyAccessDenied(err): ctx.Error(http.StatusUnprocessableEntity, "GPGKeyAccessDenied", "You do not have access to this GPG key") @@ -198,7 +269,9 @@ func HandleAddGPGKeyError(ctx *context.APIContext, err error) { case models.IsErrGPGKeyParsing(err): ctx.Error(http.StatusUnprocessableEntity, "GPGKeyParsing", err) case models.IsErrGPGNoEmailFound(err): - ctx.Error(http.StatusNotFound, "GPGNoEmailFound", err) + ctx.Error(http.StatusNotFound, "GPGNoEmailFound", fmt.Sprintf("None of the emails attached to the GPG key could be found. It may still be added if you provide a valid signature for the token: %s", token)) + case models.IsErrGPGInvalidTokenSignature(err): + ctx.Error(http.StatusUnprocessableEntity, "GPGInvalidSignature", fmt.Sprintf("The provided GPG key, signature and token do not match or token is out of date. Provide a valid signature for the token: %s", token)) default: ctx.Error(http.StatusInternalServerError, "AddGPGKey", err) } diff --git a/routers/api/v1/user/helper.go b/routers/api/v1/user/helper.go index fcdac257edc8..a3500e0ee648 100644 --- a/routers/api/v1/user/helper.go +++ b/routers/api/v1/user/helper.go @@ -17,7 +17,7 @@ func GetUserByParamsName(ctx *context.APIContext, name string) *models.User { user, err := models.GetUserByName(username) if err != nil { if models.IsErrUserNotExist(err) { - if redirectUserID, err := models.LookupUserRedirect(username); err == nil { + if redirectUserID, err2 := models.LookupUserRedirect(username); err2 == nil { context.RedirectToUser(ctx.Context, username, redirectUserID) } else { ctx.NotFound("GetUserByName", err) diff --git a/routers/api/v1/user/settings.go b/routers/api/v1/user/settings.go new file mode 100644 index 000000000000..b4548e7443fa --- /dev/null +++ b/routers/api/v1/user/settings.go @@ -0,0 +1,83 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package user + +import ( + "net/http" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/web" +) + +// GetUserSettings returns user settings +func GetUserSettings(ctx *context.APIContext) { + // swagger:operation GET /user/settings user getUserSettings + // --- + // summary: Get user settings + // produces: + // - application/json + // responses: + // "200": + // "$ref": "#/responses/UserSettings" + ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.User)) +} + +// UpdateUserSettings returns user settings +func UpdateUserSettings(ctx *context.APIContext) { + // swagger:operation PATCH /user/settings user updateUserSettings + // --- + // summary: Update user settings + // parameters: + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/UserSettingsOptions" + // produces: + // - application/json + // responses: + // "200": + // "$ref": "#/responses/UserSettings" + + form := web.GetForm(ctx).(*api.UserSettingsOptions) + + if form.FullName != nil { + ctx.User.FullName = *form.FullName + } + if form.Description != nil { + ctx.User.Description = *form.Description + } + if form.Website != nil { + ctx.User.Website = *form.Website + } + if form.Location != nil { + ctx.User.Location = *form.Location + } + if form.Language != nil { + ctx.User.Language = *form.Language + } + if form.Theme != nil { + ctx.User.Theme = *form.Theme + } + if form.DiffViewStyle != nil { + ctx.User.DiffViewStyle = *form.DiffViewStyle + } + + if form.HideEmail != nil { + ctx.User.KeepEmailPrivate = *form.HideEmail + } + if form.HideActivity != nil { + ctx.User.KeepActivityPrivate = *form.HideActivity + } + + if err := models.UpdateUser(ctx.User); err != nil { + ctx.InternalServerError(err) + return + } + + ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.User)) +} diff --git a/routers/api/v1/user/user.go b/routers/api/v1/user/user.go index 6e811bf0f8a4..ac543d597d7e 100644 --- a/routers/api/v1/user/user.go +++ b/routers/api/v1/user/user.go @@ -13,7 +13,6 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" - api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/routers/api/v1/utils" ) @@ -58,6 +57,7 @@ func Search(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) opts := &models.SearchUserOptions{ + Actor: ctx.User, Keyword: strings.Trim(ctx.Query("q"), " "), UID: ctx.QueryInt64("uid"), Type: models.UserTypeIndividual, @@ -73,18 +73,13 @@ func Search(ctx *context.APIContext) { return } - results := make([]*api.User, len(users)) - for i := range users { - results[i] = convert.ToUser(users[i], ctx.User) - } - ctx.SetLinkHeader(int(maxResults), listOptions.PageSize) ctx.Header().Set("X-Total-Count", fmt.Sprintf("%d", maxResults)) ctx.Header().Set("Access-Control-Expose-Headers", "X-Total-Count, Link") ctx.JSON(http.StatusOK, map[string]interface{}{ "ok": true, - "data": results, + "data": convert.ToUsers(ctx.User, users), }) } @@ -108,10 +103,16 @@ func GetInfo(ctx *context.APIContext) { // "$ref": "#/responses/notFound" u := GetUserByParams(ctx) + if ctx.Written() { return } + if !u.IsVisibleToUser(ctx.User) { + // fake ErrUserNotExist error message to not leak information about existence + ctx.NotFound("GetUserByName", models.ErrUserNotExist{Name: ctx.Params(":username")}) + return + } ctx.JSON(http.StatusOK, convert.ToUser(u, ctx.User)) } diff --git 
a/routers/api/v1/utils/git.go b/routers/api/v1/utils/git.go new file mode 100644 index 000000000000..ac68c3625d92 --- /dev/null +++ b/routers/api/v1/utils/git.go @@ -0,0 +1,61 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package utils + +import ( + "net/http" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/git" +) + +// ResolveRefOrSha resolve ref to sha if exist +func ResolveRefOrSha(ctx *context.APIContext, ref string) string { + if len(ref) == 0 { + ctx.Error(http.StatusBadRequest, "ref not given", nil) + return "" + } + + // Search branches and tags + for _, refType := range []string{"heads", "tags"} { + refSHA, lastMethodName, err := searchRefCommitByType(ctx, refType, ref) + if err != nil { + ctx.Error(http.StatusInternalServerError, lastMethodName, err) + return "" + } + if refSHA != "" { + return refSHA + } + } + return ref +} + +// GetGitRefs return git references based on filter +func GetGitRefs(ctx *context.APIContext, filter string) ([]*git.Reference, string, error) { + if ctx.Repo.GitRepo == nil { + var err error + ctx.Repo.GitRepo, err = git.OpenRepository(ctx.Repo.Repository.RepoPath()) + if err != nil { + return nil, "OpenRepository", err + } + defer ctx.Repo.GitRepo.Close() + } + if len(filter) > 0 { + filter = "refs/" + filter + } + refs, err := ctx.Repo.GitRepo.GetRefsFiltered(filter) + return refs, "GetRefsFiltered", err +} + +func searchRefCommitByType(ctx *context.APIContext, refType, filter string) (string, string, error) { + refs, lastMethodName, err := GetGitRefs(ctx, refType+"/"+filter) //Search by type + if err != nil { + return "", lastMethodName, err + } + if len(refs) > 0 { + return refs[0].Object.String(), "", nil //Return found SHA + } + return "", "", nil +} diff --git a/routers/api/v1/utils/hook.go b/routers/api/v1/utils/hook.go index c7af40dcf056..5f2be65a29d9 100644 --- a/routers/api/v1/utils/hook.go +++ b/routers/api/v1/utils/hook.go @@ -133,7 +133,7 @@ func addHook(ctx *context.APIContext, form *api.CreateHookOption, orgID, repoID BranchFilter: form.BranchFilter, }, IsActive: form.Active, - Type: models.HookTaskType(form.Type), + Type: models.HookType(form.Type), } if w.Type == models.SLACK { channel, ok := form.Config["channel"] diff --git a/routers/api/v1/utils/utils.go b/routers/api/v1/utils/utils.go index ad1a136db463..10ab3ebd0cfb 100644 --- a/routers/api/v1/utils/utils.go +++ b/routers/api/v1/utils/utils.go @@ -55,7 +55,7 @@ func parseTime(value string) (int64, error) { // prepareQueryArg unescape and trim a query arg func prepareQueryArg(ctx *context.APIContext, name string) (value string, err error) { value, err = url.PathUnescape(ctx.Query(name)) - value = strings.Trim(value, " ") + value = strings.TrimSpace(value) return } diff --git a/routers/common/db.go b/routers/common/db.go new file mode 100644 index 000000000000..069a46f64fe4 --- /dev/null +++ b/routers/common/db.go @@ -0,0 +1,39 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package common + +import ( + "context" + "fmt" + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/migrations" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +// InitDBEngine In case of problems connecting to DB, retry connection. 
Eg, PGSQL in Docker Container on Synology +func InitDBEngine(ctx context.Context) (err error) { + log.Info("Beginning ORM engine initialization.") + for i := 0; i < setting.Database.DBConnectRetries; i++ { + select { + case <-ctx.Done(): + return fmt.Errorf("Aborted due to shutdown:\nin retry ORM engine initialization") + default: + } + log.Info("ORM engine initialization attempt #%d/%d...", i+1, setting.Database.DBConnectRetries) + if err = models.NewEngine(ctx, migrations.Migrate); err == nil { + break + } else if i == setting.Database.DBConnectRetries-1 { + return err + } + log.Error("ORM engine initialization attempt #%d/%d failed. Error: %v", i+1, setting.Database.DBConnectRetries, err) + log.Info("Backing off for %d seconds", int64(setting.Database.DBConnectBackoff/time.Second)) + time.Sleep(setting.Database.DBConnectBackoff) + } + models.HasEngine = true + return nil +} diff --git a/routers/common/logger.go b/routers/common/logger.go new file mode 100644 index 000000000000..bc1149543c94 --- /dev/null +++ b/routers/common/logger.go @@ -0,0 +1,33 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package common + +import ( + "net/http" + "time" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" +) + +// LoggerHandler is a handler that will log the routing to the default gitea log +func LoggerHandler(level log.Level) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + start := time.Now() + + _ = log.GetLogger("router").Log(0, level, "Started %s %s for %s", log.ColoredMethod(req.Method), req.URL.RequestURI(), req.RemoteAddr) + + next.ServeHTTP(w, req) + + var status int + if v, ok := w.(context.ResponseWriter); ok { + status = v.Status() + } + + _ = log.GetLogger("router").Log(0, level, "Completed %s %s %v %s in %v", log.ColoredMethod(req.Method), req.URL.RequestURI(), log.ColoredStatus(status), log.ColoredStatus(status, http.StatusText(status)), log.ColoredTime(time.Since(start))) + }) + } +} diff --git a/routers/common/middleware.go b/routers/common/middleware.go new file mode 100644 index 000000000000..1d96522dd9d1 --- /dev/null +++ b/routers/common/middleware.go @@ -0,0 +1,76 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package common + +import ( + "fmt" + "net/http" + "strings" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + + "github.com/chi-middleware/proxy" + "github.com/go-chi/chi/middleware" +) + +// Middlewares returns common middlewares +func Middlewares() []func(http.Handler) http.Handler { + var handlers = []func(http.Handler) http.Handler{ + func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + next.ServeHTTP(context.NewResponse(resp), req) + }) + }, + } + + if setting.ReverseProxyLimit > 0 { + opt := proxy.NewForwardedHeadersOptions(). + WithForwardLimit(setting.ReverseProxyLimit). 
+ ClearTrustedProxies() + for _, n := range setting.ReverseProxyTrustedProxies { + if !strings.Contains(n, "/") { + opt.AddTrustedProxy(n) + } else { + opt.AddTrustedNetwork(n) + } + } + handlers = append(handlers, proxy.ForwardedHeaders(opt)) + } + + handlers = append(handlers, middleware.StripSlashes) + + if !setting.DisableRouterLog && setting.RouterLogLevel != log.NONE { + if log.GetLogger("router").GetLevel() <= setting.RouterLogLevel { + handlers = append(handlers, LoggerHandler(setting.RouterLogLevel)) + } + } + if setting.EnableAccessLog { + handlers = append(handlers, context.AccessLogger()) + } + + handlers = append(handlers, func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + // Why we need this? The Recovery() will try to render a beautiful + // error page for user, but the process can still panic again, and other + // middleware like session also may panic then we have to recover twice + // and send a simple error page that should not panic any more. + defer func() { + if err := recover(); err != nil { + combinedErr := fmt.Sprintf("PANIC: %v\n%s", err, string(log.Stack(2))) + log.Error("%v", combinedErr) + if setting.IsProd() { + http.Error(resp, http.StatusText(500), 500) + } else { + http.Error(resp, combinedErr, 500) + } + } + }() + next.ServeHTTP(resp, req) + }) + }) + return handlers +} diff --git a/routers/common/repo.go b/routers/common/repo.go new file mode 100644 index 000000000000..8d33fb07fbbd --- /dev/null +++ b/routers/common/repo.go @@ -0,0 +1,105 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package common + +import ( + "fmt" + "io" + "path" + "path/filepath" + "strings" + + "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/httpcache" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/typesniffer" +) + +// ServeBlob download a git.Blob +func ServeBlob(ctx *context.Context, blob *git.Blob) error { + if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`) { + return nil + } + + dataRc, err := blob.DataAsync() + if err != nil { + return err + } + defer func() { + if err = dataRc.Close(); err != nil { + log.Error("ServeBlob: Close: %v", err) + } + }() + + return ServeData(ctx, ctx.Repo.TreePath, blob.Size(), dataRc) +} + +// ServeData download file from io.Reader +func ServeData(ctx *context.Context, name string, size int64, reader io.Reader) error { + buf := make([]byte, 1024) + n, err := reader.Read(buf) + if err != nil && err != io.EOF { + return err + } + if n >= 0 { + buf = buf[:n] + } + + ctx.Resp.Header().Set("Cache-Control", "public,max-age=86400") + + if size >= 0 { + ctx.Resp.Header().Set("Content-Length", fmt.Sprintf("%d", size)) + } else { + log.Error("ServeData called to serve data: %s with size < 0: %d", name, size) + } + name = path.Base(name) + + // Google Chrome dislike commas in filenames, so let's change it to a space + name = strings.ReplaceAll(name, ",", " ") + + st := typesniffer.DetectContentType(buf) + + mappedMimeType := "" + if setting.MimeTypeMap.Enabled { + fileExtension := strings.ToLower(filepath.Ext(name)) + mappedMimeType = setting.MimeTypeMap.Map[fileExtension] + } + if st.IsText() || ctx.QueryBool("render") { + cs, err := charset.DetectEncoding(buf) + if err != nil { + 
log.Error("Detect raw file %s charset failed: %v, using by default utf-8", name, err) + cs = "utf-8" + } + if mappedMimeType == "" { + mappedMimeType = "text/plain" + } + ctx.Resp.Header().Set("Content-Type", mappedMimeType+"; charset="+strings.ToLower(cs)) + } else { + ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition") + if mappedMimeType != "" { + ctx.Resp.Header().Set("Content-Type", mappedMimeType) + } + if (st.IsImage() || st.IsPDF()) && (setting.UI.SVG.Enabled || !st.IsSvgImage()) { + ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`inline; filename="%s"`, name)) + if st.IsSvgImage() { + ctx.Resp.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox") + ctx.Resp.Header().Set("X-Content-Type-Options", "nosniff") + ctx.Resp.Header().Set("Content-Type", typesniffer.SvgMimeType) + } + } else { + ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, name)) + } + } + + _, err = ctx.Resp.Write(buf) + if err != nil { + return err + } + _, err = io.Copy(ctx.Resp, reader) + return err +} diff --git a/routers/home.go b/routers/home.go deleted file mode 100644 index 7eaebc081fd4..000000000000 --- a/routers/home.go +++ /dev/null @@ -1,413 +0,0 @@ -// Copyright 2014 The Gogs Authors. All rights reserved. -// Copyright 2019 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package routers - -import ( - "bytes" - "net/http" - "strings" - - "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/base" - "code.gitea.io/gitea/modules/context" - code_indexer "code.gitea.io/gitea/modules/indexer/code" - "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/modules/web/middleware" - "code.gitea.io/gitea/routers/user" -) - -const ( - // tplHome home page template - tplHome base.TplName = "home" - // tplExploreRepos explore repositories page template - tplExploreRepos base.TplName = "explore/repos" - // tplExploreUsers explore users page template - tplExploreUsers base.TplName = "explore/users" - // tplExploreOrganizations explore organizations page template - tplExploreOrganizations base.TplName = "explore/organizations" - // tplExploreCode explore code page template - tplExploreCode base.TplName = "explore/code" -) - -// Home render home page -func Home(ctx *context.Context) { - if ctx.IsSigned { - if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { - ctx.Data["Title"] = ctx.Tr("auth.active_your_account") - ctx.HTML(http.StatusOK, user.TplActivate) - } else if !ctx.User.IsActive || ctx.User.ProhibitLogin { - log.Info("Failed authentication attempt for %s from %s", ctx.User.Name, ctx.RemoteAddr()) - ctx.Data["Title"] = ctx.Tr("auth.prohibit_login") - ctx.HTML(http.StatusOK, "user/auth/prohibit_login") - } else if ctx.User.MustChangePassword { - ctx.Data["Title"] = ctx.Tr("auth.must_change_password") - ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password" - middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) - ctx.Redirect(setting.AppSubURL + "/user/settings/change_password") - } else { - user.Dashboard(ctx) - } - return - // Check non-logged users landing page. 
- } else if setting.LandingPageURL != setting.LandingPageHome { - ctx.Redirect(setting.AppSubURL + string(setting.LandingPageURL)) - return - } - - // Check auto-login. - uname := ctx.GetCookie(setting.CookieUserName) - if len(uname) != 0 { - ctx.Redirect(setting.AppSubURL + "/user/login") - return - } - - ctx.Data["PageIsHome"] = true - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - ctx.HTML(http.StatusOK, tplHome) -} - -// RepoSearchOptions when calling search repositories -type RepoSearchOptions struct { - OwnerID int64 - Private bool - Restricted bool - PageSize int - TplName base.TplName -} - -var ( - nullByte = []byte{0x00} -) - -func isKeywordValid(keyword string) bool { - return !bytes.Contains([]byte(keyword), nullByte) -} - -// RenderRepoSearch render repositories search page -func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { - page := ctx.QueryInt("page") - if page <= 0 { - page = 1 - } - - var ( - repos []*models.Repository - count int64 - err error - orderBy models.SearchOrderBy - ) - - ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - orderBy = models.SearchOrderByNewest - case "oldest": - orderBy = models.SearchOrderByOldest - case "recentupdate": - orderBy = models.SearchOrderByRecentUpdated - case "leastupdate": - orderBy = models.SearchOrderByLeastUpdated - case "reversealphabetically": - orderBy = models.SearchOrderByAlphabeticallyReverse - case "alphabetically": - orderBy = models.SearchOrderByAlphabetically - case "reversesize": - orderBy = models.SearchOrderBySizeReverse - case "size": - orderBy = models.SearchOrderBySize - case "moststars": - orderBy = models.SearchOrderByStarsReverse - case "feweststars": - orderBy = models.SearchOrderByStars - case "mostforks": - orderBy = models.SearchOrderByForksReverse - case "fewestforks": - orderBy = models.SearchOrderByForks - default: - ctx.Data["SortType"] = "recentupdate" - orderBy = models.SearchOrderByRecentUpdated - } - - keyword := strings.Trim(ctx.Query("q"), " ") - topicOnly := ctx.QueryBool("topic") - ctx.Data["TopicOnly"] = topicOnly - - repos, count, err = models.SearchRepository(&models.SearchRepoOptions{ - ListOptions: models.ListOptions{ - Page: page, - PageSize: opts.PageSize, - }, - Actor: ctx.User, - OrderBy: orderBy, - Private: opts.Private, - Keyword: keyword, - OwnerID: opts.OwnerID, - AllPublic: true, - AllLimited: true, - TopicOnly: topicOnly, - IncludeDescription: setting.UI.SearchRepoDescription, - }) - if err != nil { - ctx.ServerError("SearchRepository", err) - return - } - ctx.Data["Keyword"] = keyword - ctx.Data["Total"] = count - ctx.Data["Repos"] = repos - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - - pager := context.NewPagination(int(count), opts.PageSize, page, 5) - pager.SetDefaultParams(ctx) - pager.AddParam(ctx, "topic", "TopicOnly") - ctx.Data["Page"] = pager - - ctx.HTML(http.StatusOK, opts.TplName) -} - -// ExploreRepos render explore repositories page -func ExploreRepos(ctx *context.Context) { - ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage - ctx.Data["Title"] = ctx.Tr("explore") - ctx.Data["PageIsExplore"] = true - ctx.Data["PageIsExploreRepositories"] = true - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - - var ownerID int64 - if ctx.User != nil && !ctx.User.IsAdmin { - ownerID = ctx.User.ID - } - - RenderRepoSearch(ctx, &RepoSearchOptions{ - PageSize: setting.UI.ExplorePagingNum, - OwnerID: ownerID, - Private: ctx.User != 
nil, - TplName: tplExploreRepos, - }) -} - -// RenderUserSearch render user search page -func RenderUserSearch(ctx *context.Context, opts *models.SearchUserOptions, tplName base.TplName) { - opts.Page = ctx.QueryInt("page") - if opts.Page <= 1 { - opts.Page = 1 - } - - var ( - users []*models.User - count int64 - err error - orderBy models.SearchOrderBy - ) - - ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - orderBy = models.SearchOrderByIDReverse - case "oldest": - orderBy = models.SearchOrderByID - case "recentupdate": - orderBy = models.SearchOrderByRecentUpdated - case "leastupdate": - orderBy = models.SearchOrderByLeastUpdated - case "reversealphabetically": - orderBy = models.SearchOrderByAlphabeticallyReverse - case "alphabetically": - orderBy = models.SearchOrderByAlphabetically - default: - ctx.Data["SortType"] = "alphabetically" - orderBy = models.SearchOrderByAlphabetically - } - - opts.Keyword = strings.Trim(ctx.Query("q"), " ") - opts.OrderBy = orderBy - if len(opts.Keyword) == 0 || isKeywordValid(opts.Keyword) { - users, count, err = models.SearchUsers(opts) - if err != nil { - ctx.ServerError("SearchUsers", err) - return - } - } - ctx.Data["Keyword"] = opts.Keyword - ctx.Data["Total"] = count - ctx.Data["Users"] = users - ctx.Data["UsersTwoFaStatus"] = models.UserList(users).GetTwoFaStatus() - ctx.Data["ShowUserEmail"] = setting.UI.ShowUserEmail - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - - pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5) - pager.SetDefaultParams(ctx) - ctx.Data["Page"] = pager - - ctx.HTML(http.StatusOK, tplName) -} - -// ExploreUsers render explore users page -func ExploreUsers(ctx *context.Context) { - if setting.Service.Explore.DisableUsersPage { - ctx.Redirect(setting.AppSubURL + "/explore/repos") - return - } - ctx.Data["Title"] = ctx.Tr("explore") - ctx.Data["PageIsExplore"] = true - ctx.Data["PageIsExploreUsers"] = true - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - - RenderUserSearch(ctx, &models.SearchUserOptions{ - Actor: ctx.User, - Type: models.UserTypeIndividual, - ListOptions: models.ListOptions{PageSize: setting.UI.ExplorePagingNum}, - IsActive: util.OptionalBoolTrue, - Visible: []structs.VisibleType{structs.VisibleTypePublic, structs.VisibleTypeLimited, structs.VisibleTypePrivate}, - }, tplExploreUsers) -} - -// ExploreOrganizations render explore organizations page -func ExploreOrganizations(ctx *context.Context) { - ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage - ctx.Data["Title"] = ctx.Tr("explore") - ctx.Data["PageIsExplore"] = true - ctx.Data["PageIsExploreOrganizations"] = true - ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - - visibleTypes := []structs.VisibleType{structs.VisibleTypePublic} - if ctx.User != nil { - visibleTypes = append(visibleTypes, structs.VisibleTypeLimited, structs.VisibleTypePrivate) - } - - RenderUserSearch(ctx, &models.SearchUserOptions{ - Actor: ctx.User, - Type: models.UserTypeOrganization, - ListOptions: models.ListOptions{PageSize: setting.UI.ExplorePagingNum}, - Visible: visibleTypes, - }, tplExploreOrganizations) -} - -// ExploreCode render explore code page -func ExploreCode(ctx *context.Context) { - if !setting.Indexer.RepoIndexerEnabled { - ctx.Redirect(setting.AppSubURL+"/explore", 302) - return - } - - ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage - ctx.Data["IsRepoIndexerEnabled"] = 
setting.Indexer.RepoIndexerEnabled - ctx.Data["Title"] = ctx.Tr("explore") - ctx.Data["PageIsExplore"] = true - ctx.Data["PageIsExploreCode"] = true - - language := strings.TrimSpace(ctx.Query("l")) - keyword := strings.TrimSpace(ctx.Query("q")) - page := ctx.QueryInt("page") - if page <= 0 { - page = 1 - } - - queryType := strings.TrimSpace(ctx.Query("t")) - isMatch := queryType == "match" - - var ( - repoIDs []int64 - err error - isAdmin bool - ) - if ctx.User != nil { - isAdmin = ctx.User.IsAdmin - } - - // guest user or non-admin user - if ctx.User == nil || !isAdmin { - repoIDs, err = models.FindUserAccessibleRepoIDs(ctx.User) - if err != nil { - ctx.ServerError("SearchResults", err) - return - } - } - - var ( - total int - searchResults []*code_indexer.Result - searchResultLanguages []*code_indexer.SearchResultLanguages - ) - - // if non-admin login user, we need check UnitTypeCode at first - if ctx.User != nil && len(repoIDs) > 0 { - repoMaps, err := models.GetRepositoriesMapByIDs(repoIDs) - if err != nil { - ctx.ServerError("SearchResults", err) - return - } - - var rightRepoMap = make(map[int64]*models.Repository, len(repoMaps)) - repoIDs = make([]int64, 0, len(repoMaps)) - for id, repo := range repoMaps { - if repo.CheckUnitUser(ctx.User, models.UnitTypeCode) { - rightRepoMap[id] = repo - repoIDs = append(repoIDs, id) - } - } - - ctx.Data["RepoMaps"] = rightRepoMap - - total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch) - if err != nil { - ctx.ServerError("SearchResults", err) - return - } - // if non-login user or isAdmin, no need to check UnitTypeCode - } else if (ctx.User == nil && len(repoIDs) > 0) || isAdmin { - total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch) - if err != nil { - ctx.ServerError("SearchResults", err) - return - } - - var loadRepoIDs = make([]int64, 0, len(searchResults)) - for _, result := range searchResults { - var find bool - for _, id := range loadRepoIDs { - if id == result.RepoID { - find = true - break - } - } - if !find { - loadRepoIDs = append(loadRepoIDs, result.RepoID) - } - } - - repoMaps, err := models.GetRepositoriesMapByIDs(loadRepoIDs) - if err != nil { - ctx.ServerError("SearchResults", err) - return - } - - ctx.Data["RepoMaps"] = repoMaps - } - - ctx.Data["Keyword"] = keyword - ctx.Data["Language"] = language - ctx.Data["queryType"] = queryType - ctx.Data["SearchResults"] = searchResults - ctx.Data["SearchResultLanguages"] = searchResultLanguages - ctx.Data["RequireHighlightJS"] = true - ctx.Data["PageIsViewCode"] = true - - pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5) - pager.SetDefaultParams(ctx) - pager.AddParam(ctx, "l", "Language") - ctx.Data["Page"] = pager - - ctx.HTML(http.StatusOK, tplExploreCode) -} - -// NotFound render 404 page -func NotFound(ctx *context.Context) { - ctx.Data["Title"] = "Page Not Found" - ctx.NotFound("home.NotFound", nil) -} diff --git a/routers/init.go b/routers/init.go index 220d87a29da8..3ee7c7357285 100644 --- a/routers/init.go +++ b/routers/init.go @@ -6,13 +6,9 @@ package routers import ( "context" - "fmt" "strings" - "time" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/migrations" - "code.gitea.io/gitea/modules/auth/sso" "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/cron" "code.gitea.io/gitea/modules/eventsource" @@ -32,6 +28,13 @@ 
import ( "code.gitea.io/gitea/modules/svg" "code.gitea.io/gitea/modules/task" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/web" + apiv1 "code.gitea.io/gitea/routers/api/v1" + "code.gitea.io/gitea/routers/common" + "code.gitea.io/gitea/routers/private" + web_routers "code.gitea.io/gitea/routers/web" + "code.gitea.io/gitea/services/archiver" + "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/mailer" mirror_service "code.gitea.io/gitea/services/mirror" pull_service "code.gitea.io/gitea/services/pull" @@ -39,16 +42,6 @@ import ( "code.gitea.io/gitea/services/webhook" ) -func checkRunMode() { - switch setting.RunMode { - case "dev", "test": - git.Debug = true - default: - git.Debug = false - } - log.Info("Run Mode: %s", strings.Title(setting.RunMode)) -} - // NewServices init new services func NewServices() { setting.NewServices() @@ -59,64 +52,12 @@ func NewServices() { log.Fatal("repository init failed: %v", err) } mailer.NewContext() - _ = cache.NewContext() - notification.NewContext() -} - -// In case of problems connecting to DB, retry connection. Eg, PGSQL in Docker Container on Synology -func initDBEngine(ctx context.Context) (err error) { - log.Info("Beginning ORM engine initialization.") - for i := 0; i < setting.Database.DBConnectRetries; i++ { - select { - case <-ctx.Done(): - return fmt.Errorf("Aborted due to shutdown:\nin retry ORM engine initialization") - default: - } - log.Info("ORM engine initialization attempt #%d/%d...", i+1, setting.Database.DBConnectRetries) - if err = models.NewEngine(ctx, migrations.Migrate); err == nil { - break - } else if i == setting.Database.DBConnectRetries-1 { - return err - } - log.Error("ORM engine initialization attempt #%d/%d failed. Error: %v", i+1, setting.Database.DBConnectRetries, err) - log.Info("Backing off for %d seconds", int64(setting.Database.DBConnectBackoff/time.Second)) - time.Sleep(setting.Database.DBConnectBackoff) - } - models.HasEngine = true - return nil -} - -// PreInstallInit preloads the configuration to check if we need to run install -func PreInstallInit(ctx context.Context) bool { - setting.NewContext() - if !setting.InstallLock { - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) - log.Trace("Preparing to run install page") - translation.InitLocales() - if setting.EnableSQLite3 { - log.Info("SQLite3 Supported") - } - setting.InitDBConfig() - svg.Init() + if err := cache.NewContext(); err != nil { + log.Fatal("Unable to start cache service: %v", err) } - - return !setting.InstallLock -} - -// PostInstallInit rereads the settings and starts up the database -func PostInstallInit(ctx context.Context) { - setting.NewContext() - setting.InitDBConfig() - if setting.InstallLock { - if err := initDBEngine(ctx); err == nil { - log.Info("ORM engine initialization successful!") - } else { - log.Fatal("ORM engine initialization failed: %v", err) - } - svg.Init() + notification.NewContext() + if err := archiver.Init(); err != nil { + log.Fatal("archiver init failed: %v", err) } } @@ -130,12 +71,14 @@ func GlobalInit(ctx context.Context) { if err := git.Init(ctx); err != nil { log.Fatal("Git module init failed: %v", err) } - setting.CheckLFSVersion() - log.Trace("AppPath: %s", setting.AppPath) - log.Trace("AppWorkPath: %s", setting.AppWorkPath) - log.Trace("Custom path: %s", setting.CustomPath) - log.Trace("Log path: %s", setting.LogRootPath) - 
checkRunMode() + log.Info(git.VersionInfo()) + + git.CheckLFSVersion() + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) + log.Info("Run Mode: %s", strings.Title(setting.RunMode)) // Setup i18n translation.InitLocales() @@ -151,7 +94,7 @@ func GlobalInit(ctx context.Context) { } else if setting.Database.UseSQLite3 { log.Fatal("SQLite3 is set in settings but NOT Supported") } - if err := initDBEngine(ctx); err == nil { + if err := common.InitDBEngine(ctx); err == nil { log.Info("ORM engine initialization successful!") } else { log.Fatal("ORM engine initialization failed: %v", err) @@ -189,7 +132,20 @@ func GlobalInit(ctx context.Context) { } else { ssh.Unused() } - sso.Init() + auth.Init() svg.Init() } + +// NormalRoutes represents non install routes +func NormalRoutes() *web.Route { + r := web.NewRoute() + for _, middle := range common.Middlewares() { + r.Use(middle) + } + + r.Mount("/", web_routers.Routes()) + r.Mount("/api/v1", apiv1.Routes()) + r.Mount("/api/internal", private.Routes()) + return r +} diff --git a/routers/install.go b/routers/install/install.go similarity index 96% rename from routers/install.go rename to routers/install/install.go index ef53422c4e12..ad985cf18488 100644 --- a/routers/install.go +++ b/routers/install/install.go @@ -1,8 +1,9 @@ // Copyright 2014 The Gogs Authors. All rights reserved. +// Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package routers +package install import ( "fmt" @@ -21,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/user" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -37,8 +39,8 @@ const ( tplPostInstall base.TplName = "post-install" ) -// InstallInit prepare for rendering installation page -func InstallInit(next http.Handler) http.Handler { +// Init prepare for rendering installation page +func Init(next http.Handler) http.Handler { var rnd = templates.HTMLRenderer() return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { @@ -61,6 +63,8 @@ func InstallInit(next http.Handler) http.Handler { "DbOptions": setting.SupportedDatabases, "i18n": locale, "Language": locale.Language(), + "Lang": locale.Language(), + "AllLangs": translation.AllLangs(), "CurrentURL": setting.AppSubURL + req.URL.RequestURI(), "PageStartTime": startTime, "TmplLoadTimes": func() string { @@ -69,6 +73,12 @@ func InstallInit(next http.Handler) http.Handler { "PasswordHashAlgorithms": models.AvailableHashAlgorithms, }, } + for _, lang := range translation.AllLangs() { + if lang.Lang == locale.Language() { + ctx.Data["LangName"] = lang.Name + break + } + } ctx.Req = context.WithContext(req, &ctx) next.ServeHTTP(resp, ctx.Req) }) @@ -149,8 +159,8 @@ func Install(ctx *context.Context) { ctx.HTML(http.StatusOK, tplInstall) } -// InstallPost response for submit install items -func InstallPost(ctx *context.Context) { +// SubmitInstall response for submit install items +func SubmitInstall(ctx *context.Context) { form := *web.GetForm(ctx).(*forms.InstallForm) var err error ctx.Data["CurDbOption"] = form.DbType @@ -333,7 +343,7 @@ func InstallPost(ctx *context.Context) { 
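GlobalInit above now logs its environment at Info level, and router construction moves into the new NormalRoutes(), which applies the shared middleware chain from routers/common and mounts the web UI at /, the public API at /api/v1 and the internal API at /api/internal on a single router. A rough sketch of serving that router is below; the assumption is only that the returned *web.Route can be used directly as an http.Handler, while the real entry point layers graceful shutdown and listener handling on top of this.

    package main

    import (
        "context"
        "net/http"

        "code.gitea.io/gitea/routers"
    )

    func main() {
        ctx := context.Background()
        routers.GlobalInit(ctx) // settings, DB, git and services

        // Assumption: *web.Route satisfies http.Handler.
        handler := routers.NormalRoutes()

        // Plain ListenAndServe for illustration; Gitea itself wraps this
        // in its graceful server.
        if err := http.ListenAndServe(":3000", handler); err != nil {
            panic(err)
        }
    }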
cfg.Section("server").Key("LFS_START_SERVER").SetValue("true") cfg.Section("server").Key("LFS_CONTENT_PATH").SetValue(form.LFSRootPath) var secretKey string - if secretKey, err = generate.NewJwtSecret(); err != nil { + if secretKey, err = generate.NewJwtSecretBase64(); err != nil { ctx.RenderWithErr(ctx.Tr("install.lfs_jwt_secret_failed", err), tplInstall, &form) return } @@ -400,7 +410,7 @@ func InstallPost(ctx *context.Context) { } // Re-read settings - PostInstallInit(ctx.Req.Context()) + ReloadSettings(ctx) // Create admin account if len(form.AdminName) > 0 { @@ -454,7 +464,7 @@ func InstallPost(ctx *context.Context) { // Now get the http.Server from this request and shut it down // NB: This is not our hammerable graceful shutdown this is http.Server.Shutdown - srv := ctx.Req.Context().Value(http.ServerContextKey).(*http.Server) + srv := ctx.Value(http.ServerContextKey).(*http.Server) go func() { if err := srv.Shutdown(graceful.GetManager().HammerContext()); err != nil { log.Error("Unable to shutdown the install server! Error: %v", err) diff --git a/routers/routes/install.go b/routers/install/routes.go similarity index 72% rename from routers/routes/install.go rename to routers/install/routes.go index 026d92b13ece..36130d4b3f39 100644 --- a/routers/routes/install.go +++ b/routers/install/routes.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package routes +package install import ( "fmt" @@ -15,12 +15,18 @@ import ( "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" - "code.gitea.io/gitea/routers" + "code.gitea.io/gitea/routers/common" "code.gitea.io/gitea/services/forms" "gitea.com/go-chi/session" ) +type dataStore map[string]interface{} + +func (d *dataStore) GetData() map[string]interface{} { + return *d +} + func installRecovery() func(next http.Handler) http.Handler { var rnd = templates.HTMLRenderer() return func(next http.Handler) http.Handler { @@ -48,21 +54,19 @@ func installRecovery() func(next http.Handler) http.Handler { lc := middleware.Locale(w, req) var store = dataStore{ - Data: templates.Vars{ - "Language": lc.Language(), - "CurrentURL": setting.AppSubURL + req.URL.RequestURI(), - "i18n": lc, - "SignedUserID": int64(0), - "SignedUserName": "", - }, + "Language": lc.Language(), + "CurrentURL": setting.AppSubURL + req.URL.RequestURI(), + "i18n": lc, + "SignedUserID": int64(0), + "SignedUserName": "", } w.Header().Set(`X-Frame-Options`, `SAMEORIGIN`) if !setting.IsProd() { - store.Data["ErrorMsg"] = combinedErr + store["ErrorMsg"] = combinedErr } - err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store.Data)) + err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store)) if err != nil { log.Error("%v", err) } @@ -74,13 +78,18 @@ func installRecovery() func(next http.Handler) http.Handler { } } -// InstallRoutes registers the install routes -func InstallRoutes() *web.Route { +// Routes registers the install routes +func Routes() *web.Route { r := web.NewRoute() - for _, middle := range commonMiddlewares() { + for _, middle := range common.Middlewares() { r.Use(middle) } + r.Use(public.AssetsHandler(&public.Options{ + Directory: path.Join(setting.StaticRootPath, "public"), + Prefix: "/assets", + })) + r.Use(session.Sessioner(session.Options{ Provider: setting.SessionConfig.Provider, ProviderConfig: setting.SessionConfig.ProviderConfig, @@ -89,29 +98,16 @@ func InstallRoutes() *web.Route { Gclifetime: 
setting.SessionConfig.Gclifetime, Maxlifetime: setting.SessionConfig.Maxlifetime, Secure: setting.SessionConfig.Secure, + SameSite: setting.SessionConfig.SameSite, Domain: setting.SessionConfig.Domain, })) r.Use(installRecovery()) - - r.Use(public.Custom( - &public.Options{ - SkipLogging: setting.DisableRouterLog, - }, - )) - r.Use(public.Static( - &public.Options{ - Directory: path.Join(setting.StaticRootPath, "public"), - SkipLogging: setting.DisableRouterLog, - Prefix: "/assets", - }, - )) - - r.Use(routers.InstallInit) - r.Get("/", routers.Install) - r.Post("/", web.Bind(forms.InstallForm{}), routers.InstallPost) + r.Use(Init) + r.Get("/", Install) + r.Post("/", web.Bind(forms.InstallForm{}), SubmitInstall) r.NotFound(func(w http.ResponseWriter, req *http.Request) { - http.Redirect(w, req, setting.AppURL, 302) + http.Redirect(w, req, setting.AppURL, http.StatusFound) }) return r } diff --git a/routers/install/routes_test.go b/routers/install/routes_test.go new file mode 100644 index 000000000000..35a66c1c4742 --- /dev/null +++ b/routers/install/routes_test.go @@ -0,0 +1,20 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package install + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRoutes(t *testing.T) { + routes := Routes() + assert.NotNil(t, routes) + assert.Len(t, routes.R.Routes(), 1) + assert.EqualValues(t, "/", routes.R.Routes()[0].Pattern) + assert.Nil(t, routes.R.Routes()[0].SubRoutes) + assert.Len(t, routes.R.Routes()[0].Handlers, 2) +} diff --git a/routers/install/setting.go b/routers/install/setting.go new file mode 100644 index 000000000000..7b9b7bd8c225 --- /dev/null +++ b/routers/install/setting.go @@ -0,0 +1,50 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
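With installation routing isolated in routers/install, startup reduces to choosing between the two routers based on the install lock; PreloadSettings, defined just below, performs exactly that check after loading the configuration. A sketch of the selection follows (illustrative; it assumes, as in the sketch above, that *web.Route can serve as an http.Handler, and that settings have already been loaded):

    package boot

    import (
        "context"
        "net/http"

        "code.gitea.io/gitea/modules/setting"
        "code.gitea.io/gitea/routers"
        "code.gitea.io/gitea/routers/install"
    )

    // ChooseHandler serves the installation wizard until the install lock is
    // written, then switches to the fully initialized application routes.
    // Assumption: settings are already loaded (e.g. via PreloadSettings) and
    // *web.Route satisfies http.Handler.
    func ChooseHandler(ctx context.Context) http.Handler {
        if !setting.InstallLock {
            return install.Routes()
        }
        routers.GlobalInit(ctx)
        return routers.NormalRoutes()
    }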
+ +package install + +import ( + "context" + + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/svg" + "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/routers/common" +) + +// PreloadSettings preloads the configuration to check if we need to run install +func PreloadSettings(ctx context.Context) bool { + setting.NewContext() + if !setting.InstallLock { + log.Info("AppPath: %s", setting.AppPath) + log.Info("AppWorkPath: %s", setting.AppWorkPath) + log.Info("Custom path: %s", setting.CustomPath) + log.Info("Log path: %s", setting.LogRootPath) + log.Info("Preparing to run install page") + translation.InitLocales() + if setting.EnableSQLite3 { + log.Info("SQLite3 Supported") + } + setting.InitDBConfig() + setting.NewServicesForInstall() + svg.Init() + } + + return !setting.InstallLock +} + +// ReloadSettings rereads the settings and starts up the database +func ReloadSettings(ctx context.Context) { + setting.NewContext() + setting.InitDBConfig() + if setting.InstallLock { + if err := common.InitDBEngine(ctx); err == nil { + log.Info("ORM engine initialization successful!") + } else { + log.Fatal("ORM engine initialization failed: %v", err) + } + svg.Init() + } +} diff --git a/routers/private/hook.go b/routers/private/hook.go index 83c3f21b8f42..9f5579b6ae68 100644 --- a/routers/private/hook.go +++ b/routers/private/hook.go @@ -124,8 +124,8 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) { repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName) if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", ownerName, repoName, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -133,8 +133,8 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) { gitRepo, err := git.OpenRepository(repo.RepoPath()) if err != nil { log.Error("Unable to get git repository for: %s/%s Error: %v", ownerName, repoName, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -155,215 +155,248 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) { private.GitQuarantinePath+"="+opts.GitQuarantinePath) } + protectedTags, err := repo.GetProtectedTags() + if err != nil { + log.Error("Unable to get protected tags for %-v Error: %v", repo, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), + }) + return + } + // Iterate across the provided old commit IDs for i := range opts.OldCommitIDs { oldCommitID := opts.OldCommitIDs[i] newCommitID := opts.NewCommitIDs[i] refFullName := opts.RefFullNames[i] - branchName := strings.TrimPrefix(refFullName, git.BranchPrefix) - if branchName == repo.DefaultBranch && newCommitID == git.EmptySHA { - log.Warn("Forbidden: Branch: %s is the default branch in %-v and cannot be deleted", branchName, repo) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is the default branch and cannot be deleted", branchName), - }) - return - } - - protectBranch, err := models.GetProtectedBranchBy(repo.ID, branchName) - if err != nil { - log.Error("Unable to get protected branch: %s in %-v Error: %v", branchName, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), - }) - return - } - - // Allow pushes to 
non-protected branches - if protectBranch == nil || !protectBranch.IsProtected() { - continue - } - - // This ref is a protected branch. - // - // First of all we need to enforce absolutely: - // - // 1. Detect and prevent deletion of the branch - if newCommitID == git.EmptySHA { - log.Warn("Forbidden: Branch: %s in %-v is protected from deletion", branchName, repo) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is protected from deletion", branchName), - }) - return - } + if strings.HasPrefix(refFullName, git.BranchPrefix) { + branchName := strings.TrimPrefix(refFullName, git.BranchPrefix) + if branchName == repo.DefaultBranch && newCommitID == git.EmptySHA { + log.Warn("Forbidden: Branch: %s is the default branch in %-v and cannot be deleted", branchName, repo) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is the default branch and cannot be deleted", branchName), + }) + return + } - // 2. Disallow force pushes to protected branches - if git.EmptySHA != oldCommitID { - output, err := git.NewCommand("rev-list", "--max-count=1", oldCommitID, "^"+newCommitID).RunInDirWithEnv(repo.RepoPath(), env) + protectBranch, err := models.GetProtectedBranchBy(repo.ID, branchName) if err != nil { - log.Error("Unable to detect force push between: %s and %s in %-v Error: %v", oldCommitID, newCommitID, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Fail to detect force push: %v", err), + log.Error("Unable to get protected branch: %s in %-v Error: %v", branchName, repo, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return - } else if len(output) > 0 { - log.Warn("Forbidden: Branch: %s in %-v is protected from force push", branchName, repo) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is protected from force push", branchName), + } + + // Allow pushes to non-protected branches + if protectBranch == nil || !protectBranch.IsProtected() { + continue + } + + // This ref is a protected branch. + // + // First of all we need to enforce absolutely: + // + // 1. Detect and prevent deletion of the branch + if newCommitID == git.EmptySHA { + log.Warn("Forbidden: Branch: %s in %-v is protected from deletion", branchName, repo) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is protected from deletion", branchName), }) return - } - } - // 3. Enforce require signed commits - if protectBranch.RequireSignedCommits { - err := verifyCommits(oldCommitID, newCommitID, gitRepo, env) - if err != nil { - if !isErrUnverifiedCommit(err) { - log.Error("Unable to check commits from %s to %s in %-v: %v", oldCommitID, newCommitID, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to check commits from %s to %s: %v", oldCommitID, newCommitID, err), + // 2. 
Disallow force pushes to protected branches + if git.EmptySHA != oldCommitID { + output, err := git.NewCommand("rev-list", "--max-count=1", oldCommitID, "^"+newCommitID).RunInDirWithEnv(repo.RepoPath(), env) + if err != nil { + log.Error("Unable to detect force push between: %s and %s in %-v Error: %v", oldCommitID, newCommitID, repo, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Fail to detect force push: %v", err), }) return + } else if len(output) > 0 { + log.Warn("Forbidden: Branch: %s in %-v is protected from force push", branchName, repo) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is protected from force push", branchName), + }) + return + } - unverifiedCommit := err.(*errUnverifiedCommit).sha - log.Warn("Forbidden: Branch: %s in %-v is protected from unverified commit %s", branchName, repo, unverifiedCommit) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is protected from unverified commit %s", branchName, unverifiedCommit), - }) - return } - } - // Now there are several tests which can be overridden: - // - // 4. Check protected file patterns - this is overridable from the UI - changedProtectedfiles := false - protectedFilePath := "" - - globs := protectBranch.GetProtectedFilePatterns() - if len(globs) > 0 { - _, err := pull_service.CheckFileProtection(oldCommitID, newCommitID, globs, 1, env, gitRepo) - if err != nil { - if !models.IsErrFilePathProtected(err) { - log.Error("Unable to check file protection for commits from %s to %s in %-v: %v", oldCommitID, newCommitID, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to check file protection for commits from %s to %s: %v", oldCommitID, newCommitID, err), + // 3. Enforce require signed commits + if protectBranch.RequireSignedCommits { + err := verifyCommits(oldCommitID, newCommitID, gitRepo, env) + if err != nil { + if !isErrUnverifiedCommit(err) { + log.Error("Unable to check commits from %s to %s in %-v: %v", oldCommitID, newCommitID, repo, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to check commits from %s to %s: %v", oldCommitID, newCommitID, err), + }) + return + } + unverifiedCommit := err.(*errUnverifiedCommit).sha + log.Warn("Forbidden: Branch: %s in %-v is protected from unverified commit %s", branchName, repo, unverifiedCommit) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is protected from unverified commit %s", branchName, unverifiedCommit), }) return } + } + + // Now there are several tests which can be overridden: + // + // 4. 
Check protected file patterns - this is overridable from the UI + changedProtectedfiles := false + protectedFilePath := "" - changedProtectedfiles = true - protectedFilePath = err.(models.ErrFilePathProtected).Path + globs := protectBranch.GetProtectedFilePatterns() + if len(globs) > 0 { + _, err := pull_service.CheckFileProtection(oldCommitID, newCommitID, globs, 1, env, gitRepo) + if err != nil { + if !models.IsErrFilePathProtected(err) { + log.Error("Unable to check file protection for commits from %s to %s in %-v: %v", oldCommitID, newCommitID, repo, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to check file protection for commits from %s to %s: %v", oldCommitID, newCommitID, err), + }) + return + } + + changedProtectedfiles = true + protectedFilePath = err.(models.ErrFilePathProtected).Path + } } - } - // 5. Check if the doer is allowed to push - canPush := false - if opts.IsDeployKey { - canPush = !changedProtectedfiles && protectBranch.CanPush && (!protectBranch.EnableWhitelist || protectBranch.WhitelistDeployKeys) - } else { - canPush = !changedProtectedfiles && protectBranch.CanUserPush(opts.UserID) - } + // 5. Check if the doer is allowed to push + canPush := false + if opts.IsDeployKey { + canPush = !changedProtectedfiles && protectBranch.CanPush && (!protectBranch.EnableWhitelist || protectBranch.WhitelistDeployKeys) + } else { + canPush = !changedProtectedfiles && protectBranch.CanUserPush(opts.UserID) + } - // 6. If we're not allowed to push directly - if !canPush { - // Is this is a merge from the UI/API? - if opts.ProtectedBranchID == 0 { - // 6a. If we're not merging from the UI/API then there are two ways we got here: - // - // We are changing a protected file and we're not allowed to do that - if changedProtectedfiles { - log.Warn("Forbidden: Branch: %s in %-v is protected from changing file %s", branchName, repo, protectedFilePath) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is protected from changing file %s", branchName, protectedFilePath), + // 6. If we're not allowed to push directly + if !canPush { + // Is this is a merge from the UI/API? + if opts.PullRequestID == 0 { + // 6a. If we're not merging from the UI/API then there are two ways we got here: + // + // We are changing a protected file and we're not allowed to do that + if changedProtectedfiles { + log.Warn("Forbidden: Branch: %s in %-v is protected from changing file %s", branchName, repo, protectedFilePath) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is protected from changing file %s", branchName, protectedFilePath), + }) + return + } + + // Or we're simply not able to push to this protected branch + log.Warn("Forbidden: User %d is not allowed to push to protected branch: %s in %-v", opts.UserID, branchName, repo) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("Not allowed to push to protected branch %s", branchName), }) return } + // 6b. Merge (from UI or API) - // Or we're simply not able to push to this protected branch - log.Warn("Forbidden: User %d is not allowed to push to protected branch: %s in %-v", opts.UserID, branchName, repo) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("Not allowed to push to protected branch %s", branchName), - }) - return - } - // 6b. 
Merge (from UI or API) + // Get the PR, user and permissions for the user in the repository + pr, err := models.GetPullRequestByID(opts.PullRequestID) + if err != nil { + log.Error("Unable to get PullRequest %d Error: %v", opts.PullRequestID, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get PullRequest %d Error: %v", opts.PullRequestID, err), + }) + return + } + user, err := models.GetUserByID(opts.UserID) + if err != nil { + log.Error("Unable to get User id %d Error: %v", opts.UserID, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get User id %d Error: %v", opts.UserID, err), + }) + return + } + perm, err := models.GetUserRepoPermission(repo, user) + if err != nil { + log.Error("Unable to get Repo permission of repo %s/%s of User %s", repo.OwnerName, repo.Name, user.Name, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get Repo permission of repo %s/%s of User %s: %v", repo.OwnerName, repo.Name, user.Name, err), + }) + return + } - // Get the PR, user and permissions for the user in the repository - pr, err := models.GetPullRequestByID(opts.ProtectedBranchID) - if err != nil { - log.Error("Unable to get PullRequest %d Error: %v", opts.ProtectedBranchID, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to get PullRequest %d Error: %v", opts.ProtectedBranchID, err), - }) - return - } - user, err := models.GetUserByID(opts.UserID) - if err != nil { - log.Error("Unable to get User id %d Error: %v", opts.UserID, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to get User id %d Error: %v", opts.UserID, err), - }) - return - } - perm, err := models.GetUserRepoPermission(repo, user) - if err != nil { - log.Error("Unable to get Repo permission of repo %s/%s of User %s", repo.OwnerName, repo.Name, user.Name, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to get Repo permission of repo %s/%s of User %s: %v", repo.OwnerName, repo.Name, user.Name, err), - }) - return - } + // Now check if the user is allowed to merge PRs for this repository + allowedMerge, err := pull_service.IsUserAllowedToMerge(pr, perm, user) + if err != nil { + log.Error("Error calculating if allowed to merge: %v", err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Error calculating if allowed to merge: %v", err), + }) + return + } - // Now check if the user is allowed to merge PRs for this repository - allowedMerge, err := pull_service.IsUserAllowedToMerge(pr, perm, user) - if err != nil { - log.Error("Error calculating if allowed to merge: %v", err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Error calculating if allowed to merge: %v", err), - }) - return - } + if !allowedMerge { + log.Warn("Forbidden: User %d is not allowed to push to protected branch: %s in %-v and is not allowed to merge pr #%d", opts.UserID, branchName, repo, pr.Index) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("Not allowed to push to protected branch %s", branchName), + }) + return + } - if !allowedMerge { - log.Warn("Forbidden: User %d is not allowed to push to protected branch: %s in %-v and is not allowed to merge pr #%d", opts.UserID, branchName, repo, pr.Index) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("Not 
allowed to push to protected branch %s", branchName), - }) - return - } + // If we're an admin for the repository we can ignore status checks, reviews and override protected files + if perm.IsAdmin() { + continue + } - // If we're an admin for the repository we can ignore status checks, reviews and override protected files - if perm.IsAdmin() { - continue + // Now if we're not an admin - we can't overwrite protected files so fail now + if changedProtectedfiles { + log.Warn("Forbidden: Branch: %s in %-v is protected from changing file %s", branchName, repo, protectedFilePath) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("branch %s is protected from changing file %s", branchName, protectedFilePath), + }) + return + } + + // Check all status checks and reviews are ok + if err := pull_service.CheckPRReadyToMerge(pr, true); err != nil { + if models.IsErrNotAllowedToMerge(err) { + log.Warn("Forbidden: User %d is not allowed push to protected branch %s in %-v and pr #%d is not ready to be merged: %s", opts.UserID, branchName, repo, pr.Index, err.Error()) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("Not allowed to push to protected branch %s and pr #%d is not ready to be merged: %s", branchName, opts.PullRequestID, err.Error()), + }) + return + } + log.Error("Unable to check if mergable: protected branch %s in %-v and pr #%d. Error: %v", opts.UserID, branchName, repo, pr.Index, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get status of pull request %d. Error: %v", opts.PullRequestID, err), + }) + return + } } + } else if strings.HasPrefix(refFullName, git.TagPrefix) { + tagName := strings.TrimPrefix(refFullName, git.TagPrefix) - // Now if we're not an admin - we can't overwrite protected files so fail now - if changedProtectedfiles { - log.Warn("Forbidden: Branch: %s in %-v is protected from changing file %s", branchName, repo, protectedFilePath) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("branch %s is protected from changing file %s", branchName, protectedFilePath), + isAllowed, err := models.IsUserAllowedToControlTag(protectedTags, tagName, opts.UserID) + if err != nil { + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } - - // Check all status checks and reviews are ok - if err := pull_service.CheckPRReadyToMerge(pr, true); err != nil { - if models.IsErrNotAllowedToMerge(err) { - log.Warn("Forbidden: User %d is not allowed push to protected branch %s in %-v and pr #%d is not ready to be merged: %s", opts.UserID, branchName, repo, pr.Index, err.Error()) - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": fmt.Sprintf("Not allowed to push to protected branch %s and pr #%d is not ready to be merged: %s", branchName, opts.ProtectedBranchID, err.Error()), - }) - return - } - log.Error("Unable to check if mergable: protected branch %s in %-v and pr #%d. Error: %v", opts.UserID, branchName, repo, pr.Index, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Unable to get status of pull request %d. 
Error: %v", opts.ProtectedBranchID, err), + if !isAllowed { + log.Warn("Forbidden: Tag %s in %-v is protected", tagName, repo) + ctx.JSON(http.StatusForbidden, private.Response{ + Err: fmt.Sprintf("Tag %s is protected", tagName), }) return } + } else { + log.Error("Unexpected ref: %s", refFullName) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unexpected ref: %s", refFullName), + }) } } @@ -549,8 +582,8 @@ func SetDefaultBranch(ctx *gitea_context.PrivateContext) { repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName) if err != nil { log.Error("Failed to get repository: %s/%s Error: %v", ownerName, repoName, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "Err": fmt.Sprintf("Failed to get repository: %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to get repository: %s/%s Error: %v", ownerName, repoName, err), }) return } @@ -561,16 +594,16 @@ func SetDefaultBranch(ctx *gitea_context.PrivateContext) { repo.DefaultBranch = branch gitRepo, err := git.OpenRepository(repo.RepoPath()) if err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "Err": fmt.Sprintf("Failed to get git repository: %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to get git repository: %s/%s Error: %v", ownerName, repoName, err), }) return } if err := gitRepo.SetDefaultBranch(repo.DefaultBranch); err != nil { if !git.IsErrUnsupportedVersion(err) { gitRepo.Close() - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "Err": fmt.Sprintf("Unable to set default branch on repository: %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to set default branch on repository: %s/%s Error: %v", ownerName, repoName, err), }) return } @@ -578,10 +611,10 @@ func SetDefaultBranch(ctx *gitea_context.PrivateContext) { gitRepo.Close() if err := repo.UpdateDefaultBranch(); err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "Err": fmt.Sprintf("Unable to set default branch on repository: %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to set default branch on repository: %s/%s Error: %v", ownerName, repoName, err), }) return } - ctx.PlainText(200, []byte("success")) + ctx.PlainText(http.StatusOK, []byte("success")) } diff --git a/routers/private/internal.go b/routers/private/internal.go index e541591a3840..9202e6721881 100644 --- a/routers/private/internal.go +++ b/routers/private/internal.go @@ -23,7 +23,7 @@ import ( func CheckInternalToken(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { tokens := req.Header.Get("Authorization") - fields := strings.Fields(tokens) + fields := strings.SplitN(tokens, " ", 2) if len(fields) != 2 || fields[0] != "Bearer" || fields[1] != setting.InternalToken { log.Debug("Forbidden attempt to access internal url: Authorization header: %s", tokens) http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden) @@ -55,6 +55,7 @@ func Routes() *web.Route { r.Post("/ssh/authorized_keys", AuthorizedPublicKeyByContent) r.Post("/ssh/{id}/update/{repoid}", UpdatePublicKeyInRepo) + r.Post("/ssh/log", bind(private.SSHLogOption{}), SSHLog) 
r.Post("/hook/pre-receive/{owner}/{repo}", bind(private.HookOptions{}), HookPreReceive) r.Post("/hook/post-receive/{owner}/{repo}", bind(private.HookOptions{}), HookPostReceive) r.Post("/hook/set-default-branch/{owner}/{repo}/{branch}", SetDefaultBranch) @@ -69,6 +70,7 @@ func Routes() *web.Route { r.Post("/manager/add-logger", bind(private.LoggerOptions{}), AddLogger) r.Post("/manager/remove-logger/{group}/{name}", RemoveLogger) r.Post("/mail/send", SendEmail) + r.Post("/restore_repo", RestoreRepo) return r } diff --git a/routers/private/key.go b/routers/private/key.go index b90faa22a4fb..2bb319083ff4 100644 --- a/routers/private/key.go +++ b/routers/private/key.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/private" "code.gitea.io/gitea/modules/timeutil" ) @@ -18,8 +19,8 @@ func UpdatePublicKeyInRepo(ctx *context.PrivateContext) { keyID := ctx.ParamsInt64(":id") repoID := ctx.ParamsInt64(":repoid") if err := models.UpdatePublicKeyUpdated(keyID); err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -27,18 +28,18 @@ func UpdatePublicKeyInRepo(ctx *context.PrivateContext) { deployKey, err := models.GetDeployKeyByRepo(keyID, repoID) if err != nil { if models.IsErrDeployKeyNotExist(err) { - ctx.PlainText(200, []byte("success")) + ctx.PlainText(http.StatusOK, []byte("success")) return } - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } deployKey.UpdatedUnix = timeutil.TimeStampNow() if err = models.UpdateDeployKeyCols(deployKey, "updated_unix"); err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -53,8 +54,8 @@ func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) { publicKey, err := models.SearchPublicKeyByContent(content) if err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } diff --git a/routers/private/mail.go b/routers/private/mail.go index cda442ea0478..d7bd5155f65a 100644 --- a/routers/private/mail.go +++ b/routers/private/mail.go @@ -23,8 +23,8 @@ import ( // It doesn't wait before each message will be processed func SendEmail(ctx *context.PrivateContext) { if setting.MailService == nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": "Mail service is not enabled.", + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: "Mail service is not enabled.", }) return } @@ -35,8 +35,8 @@ func SendEmail(ctx *context.PrivateContext) { json := jsoniter.ConfigCompatibleWithStandardLibrary if err := json.NewDecoder(rd).Decode(&mail); err != nil { log.Error("%v", err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err, + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -48,8 +48,8 @@ func SendEmail(ctx *context.PrivateContext) { if err != nil { err := fmt.Sprintf("Failed to get user information: %v", err) log.Error(err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err, + ctx.JSON(http.StatusInternalServerError, 
private.Response{ + Err: err, }) return } @@ -68,8 +68,8 @@ func SendEmail(ctx *context.PrivateContext) { if err != nil { err := fmt.Sprintf("Failed to find users: %v", err) log.Error(err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err, + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err, }) return } diff --git a/routers/private/manager.go b/routers/private/manager.go index 192c4947e76a..7d010f3f8180 100644 --- a/routers/private/manager.go +++ b/routers/private/manager.go @@ -30,15 +30,15 @@ func FlushQueues(ctx *context.PrivateContext) { log.Error("Flushing request timed-out with error: %v", err) } }() - ctx.JSON(http.StatusAccepted, map[string]interface{}{ - "err": "Flushing", + ctx.JSON(http.StatusAccepted, private.Response{ + Err: "Flushing", }) return } - err := queue.GetManager().FlushAll(ctx.Req.Context(), opts.Timeout) + err := queue.GetManager().FlushAll(ctx, opts.Timeout) if err != nil { - ctx.JSON(http.StatusRequestTimeout, map[string]interface{}{ - "err": fmt.Sprintf("%v", err), + ctx.JSON(http.StatusRequestTimeout, private.Response{ + Err: fmt.Sprintf("%v", err), }) } ctx.PlainText(http.StatusOK, []byte("success")) @@ -59,8 +59,8 @@ func ResumeLogging(ctx *context.PrivateContext) { // ReleaseReopenLogging releases and reopens logging files func ReleaseReopenLogging(ctx *context.PrivateContext) { if err := log.ReleaseReopen(); err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Error during release and reopen: %v", err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Error during release and reopen: %v", err), }) return } @@ -73,8 +73,8 @@ func RemoveLogger(ctx *context.PrivateContext) { name := ctx.Params("name") ok, err := log.GetLogger(group).DelLogger(name) if err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Failed to remove logger: %s %s %v", group, name, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to remove logger: %s %s %v", group, name, err), }) return } @@ -134,8 +134,8 @@ func AddLogger(ctx *context.PrivateContext) { byteConfig, err := json.Marshal(opts.Config) if err != nil { log.Error("Failed to marshal log configuration: %v %v", opts.Config, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Failed to marshal log configuration: %v %v", opts.Config, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to marshal log configuration: %v %v", opts.Config, err), }) return } @@ -143,8 +143,8 @@ func AddLogger(ctx *context.PrivateContext) { if err := log.NewNamedLogger(opts.Group, bufferLen, opts.Name, opts.Mode, config); err != nil { log.Error("Failed to create new named logger: %s %v", config, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": fmt.Sprintf("Failed to create new named logger: %s %v", config, err), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to create new named logger: %s %v", config, err), }) return } diff --git a/routers/private/manager_windows.go b/routers/private/manager_windows.go index 244dbbe4df5b..f6c9b7ec8ff2 100644 --- a/routers/private/manager_windows.go +++ b/routers/private/manager_windows.go @@ -11,12 +11,13 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/private" ) // Restart is 
not implemented for Windows based servers as they can't fork func Restart(ctx *context.PrivateContext) { - ctx.JSON(http.StatusNotImplemented, map[string]interface{}{ - "err": "windows servers cannot be gracefully restarted - shutdown and restart manually", + ctx.JSON(http.StatusNotImplemented, private.Response{ + Err: "windows servers cannot be gracefully restarted - shutdown and restart manually", }) } diff --git a/routers/private/restore_repo.go b/routers/private/restore_repo.go new file mode 100644 index 000000000000..36d17dd95c23 --- /dev/null +++ b/routers/private/restore_repo.go @@ -0,0 +1,53 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package private + +import ( + "io/ioutil" + "net/http" + + myCtx "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/migrations" + "code.gitea.io/gitea/modules/private" + jsoniter "github.com/json-iterator/go" +) + +// RestoreRepo restore a repository from data +func RestoreRepo(ctx *myCtx.PrivateContext) { + json := jsoniter.ConfigCompatibleWithStandardLibrary + bs, err := ioutil.ReadAll(ctx.Req.Body) + if err != nil { + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), + }) + return + } + var params = struct { + RepoDir string + OwnerName string + RepoName string + Units []string + }{} + if err = json.Unmarshal(bs, ¶ms); err != nil { + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), + }) + return + } + + if err := migrations.RestoreRepository( + ctx, + params.RepoDir, + params.OwnerName, + params.RepoName, + params.Units, + ); err != nil { + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), + }) + } else { + ctx.Status(http.StatusOK) + } +} diff --git a/routers/private/serv.go b/routers/private/serv.go index 1461194e7f72..6e39790eb504 100644 --- a/routers/private/serv.go +++ b/routers/private/serv.go @@ -23,8 +23,8 @@ import ( func ServNoCommand(ctx *context.PrivateContext) { keyID := ctx.ParamsInt64(":keyid") if keyID <= 0 { - ctx.JSON(http.StatusBadRequest, map[string]interface{}{ - "err": fmt.Sprintf("Bad key id: %d", keyID), + ctx.JSON(http.StatusBadRequest, private.Response{ + Err: fmt.Sprintf("Bad key id: %d", keyID), }) } results := private.KeyAndOwner{} @@ -32,14 +32,14 @@ func ServNoCommand(ctx *context.PrivateContext) { key, err := models.GetPublicKeyByID(keyID) if err != nil { if models.IsErrKeyNotExist(err) { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "err": fmt.Sprintf("Cannot find key: %d", keyID), + ctx.JSON(http.StatusUnauthorized, private.Response{ + Err: fmt.Sprintf("Cannot find key: %d", keyID), }) return } log.Error("Unable to get public key: %d Error: %v", keyID, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } @@ -49,20 +49,20 @@ func ServNoCommand(ctx *context.PrivateContext) { user, err := models.GetUserByID(key.OwnerID) if err != nil { if models.IsErrUserNotExist(err) { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "err": fmt.Sprintf("Cannot find owner with id: %d for key: %d", key.OwnerID, keyID), + ctx.JSON(http.StatusUnauthorized, private.Response{ + Err: fmt.Sprintf("Cannot find owner with id: %d for key: %d", key.OwnerID, keyID), }) return } log.Error("Unable to get owner with id: %d for public key: %d Error: %v", 
key.OwnerID, keyID, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "err": err.Error(), + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: err.Error(), }) return } if !user.IsActive || user.ProhibitLogin { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": "Your account is disabled.", + ctx.JSON(http.StatusForbidden, private.Response{ + Err: "Your account is disabled.", }) return } @@ -106,18 +106,16 @@ func ServCommand(ctx *context.PrivateContext) { owner, err := models.GetUserByName(results.OwnerName) if err != nil { log.Error("Unable to get repository owner: %s/%s Error: %v", results.OwnerName, results.RepoName, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get repository owner: %s/%s %v", results.OwnerName, results.RepoName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get repository owner: %s/%s %v", results.OwnerName, results.RepoName, err), }) return } if !owner.IsOrganization() && !owner.IsActive { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "results": results, - "type": "ForbiddenError", - "err": "Repository cannot be accessed, you could retry it later", + ctx.JSON(http.StatusForbidden, private.ErrServCommand{ + Results: results, + Err: "Repository cannot be accessed, you could retry it later", }) return } @@ -132,20 +130,18 @@ func ServCommand(ctx *context.PrivateContext) { if "git-upload-pack" == verb { // User is fetching/cloning a non-existent repository log.Error("Failed authentication attempt (cannot find repository: %s/%s) from %s", results.OwnerName, results.RepoName, ctx.RemoteAddr()) - ctx.JSON(http.StatusNotFound, map[string]interface{}{ - "results": results, - "type": "ErrRepoNotExist", - "err": fmt.Sprintf("Cannot find repository: %s/%s", results.OwnerName, results.RepoName), + ctx.JSON(http.StatusNotFound, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Cannot find repository: %s/%s", results.OwnerName, results.RepoName), }) return } } } else { log.Error("Unable to get repository: %s/%s Error: %v", results.OwnerName, results.RepoName, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get repository: %s/%s %v", results.OwnerName, results.RepoName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get repository: %s/%s %v", results.OwnerName, results.RepoName, err), }) return } @@ -157,20 +153,18 @@ func ServCommand(ctx *context.PrivateContext) { results.RepoID = repo.ID if repo.IsBeingCreated() { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": "Repository is being created, you could retry after it finished", + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: "Repository is being created, you could retry after it finished", }) return } // We can shortcut at this point if the repo is a mirror if mode > models.AccessModeRead && repo.IsMirror { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrMirrorReadOnly", - "err": fmt.Sprintf("Mirror Repository %s/%s is read-only", results.OwnerName, results.RepoName), + ctx.JSON(http.StatusForbidden, 
private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Mirror Repository %s/%s is read-only", results.OwnerName, results.RepoName), }) return } @@ -180,18 +174,16 @@ func ServCommand(ctx *context.PrivateContext) { key, err := models.GetPublicKeyByID(keyID) if err != nil { if models.IsErrKeyNotExist(err) { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrKeyNotExist", - "err": fmt.Sprintf("Cannot find key: %d", keyID), + ctx.JSON(http.StatusNotFound, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Cannot find key: %d", keyID), }) return } log.Error("Unable to get public key: %d Error: %v", keyID, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get key: %d Error: %v", keyID, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get key: %d Error: %v", keyID, err), }) return } @@ -201,10 +193,9 @@ func ServCommand(ctx *context.PrivateContext) { // If repo doesn't exist, deploy key doesn't make sense if !repoExist && key.Type == models.KeyTypeDeploy { - ctx.JSON(http.StatusNotFound, map[string]interface{}{ - "results": results, - "type": "ErrRepoNotExist", - "err": fmt.Sprintf("Cannot find repository %s/%s", results.OwnerName, results.RepoName), + ctx.JSON(http.StatusNotFound, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Cannot find repository %s/%s", results.OwnerName, results.RepoName), }) return } @@ -221,18 +212,16 @@ func ServCommand(ctx *context.PrivateContext) { deployKey, err = models.GetDeployKeyByRepo(key.ID, repo.ID) if err != nil { if models.IsErrDeployKeyNotExist(err) { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrDeployKeyNotExist", - "err": fmt.Sprintf("Public (Deploy) Key: %d:%s is not authorized to %s %s/%s.", key.ID, key.Name, modeString, results.OwnerName, results.RepoName), + ctx.JSON(http.StatusNotFound, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Public (Deploy) Key: %d:%s is not authorized to %s %s/%s.", key.ID, key.Name, modeString, results.OwnerName, results.RepoName), }) return } log.Error("Unable to get deploy for public (deploy) key: %d in %-v Error: %v", key.ID, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get Deploy Key for Public Key: %d:%s in %s/%s.", key.ID, key.Name, results.OwnerName, results.RepoName), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get Deploy Key for Public Key: %d:%s in %s/%s.", key.ID, key.Name, results.OwnerName, results.RepoName), }) return } @@ -252,25 +241,23 @@ func ServCommand(ctx *context.PrivateContext) { user, err = models.GetUserByID(key.OwnerID) if err != nil { if models.IsErrUserNotExist(err) { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrUserNotExist", - "err": fmt.Sprintf("Public Key: %d:%s owner %d does not exist.", key.ID, key.Name, key.OwnerID), + ctx.JSON(http.StatusUnauthorized, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Public Key: %d:%s owner %d does not exist.", key.ID, key.Name, key.OwnerID), }) return } log.Error("Unable to get owner: %d for public key: %d:%s Error: %v", key.OwnerID, key.ID, key.Name, err) - 
ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get Owner: %d for Deploy Key: %d:%s in %s/%s.", key.OwnerID, key.ID, key.Name, ownerName, repoName), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get Owner: %d for Deploy Key: %d:%s in %s/%s.", key.OwnerID, key.ID, key.Name, ownerName, repoName), }) return } if !user.IsActive || user.ProhibitLogin { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": "Your account is disabled.", + ctx.JSON(http.StatusForbidden, private.Response{ + Err: "Your account is disabled.", }) return } @@ -283,10 +270,9 @@ func ServCommand(ctx *context.PrivateContext) { // Don't allow pushing if the repo is archived if repoExist && mode > models.AccessModeRead && repo.IsArchived { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrRepoIsArchived", - "err": fmt.Sprintf("Repo: %s/%s is archived.", results.OwnerName, results.RepoName), + ctx.JSON(http.StatusUnauthorized, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Repo: %s/%s is archived.", results.OwnerName, results.RepoName), }) return } @@ -295,10 +281,9 @@ func ServCommand(ctx *context.PrivateContext) { if repoExist && (mode > models.AccessModeRead || repo.IsPrivate || setting.Service.RequireSignInView) { if key.Type == models.KeyTypeDeploy { if deployKey.Mode < mode { - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrUnauthorized", - "err": fmt.Sprintf("Deploy Key: %d:%s is not authorized to %s %s/%s.", key.ID, key.Name, modeString, results.OwnerName, results.RepoName), + ctx.JSON(http.StatusUnauthorized, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Deploy Key: %d:%s is not authorized to %s %s/%s.", key.ID, key.Name, modeString, results.OwnerName, results.RepoName), }) return } @@ -306,10 +291,9 @@ func ServCommand(ctx *context.PrivateContext) { perm, err := models.GetUserRepoPermission(repo, user) if err != nil { log.Error("Unable to get permissions for %-v with key %d in %-v Error: %v", user, key.ID, repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get permissions for user %d:%s with key %d in %s/%s Error: %v", user.ID, user.Name, key.ID, results.OwnerName, results.RepoName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get permissions for user %d:%s with key %d in %s/%s Error: %v", user.ID, user.Name, key.ID, results.OwnerName, results.RepoName, err), }) return } @@ -318,10 +302,9 @@ func ServCommand(ctx *context.PrivateContext) { if userMode < mode { log.Error("Failed authentication attempt for %s with key %s (not authorized to %s %s/%s) from %s", user.Name, key.Name, modeString, ownerName, repoName, ctx.RemoteAddr()) - ctx.JSON(http.StatusUnauthorized, map[string]interface{}{ - "results": results, - "type": "ErrUnauthorized", - "err": fmt.Sprintf("User: %d:%s with Key: %d:%s is not authorized to %s %s/%s.", user.ID, user.Name, key.ID, key.Name, modeString, ownerName, repoName), + ctx.JSON(http.StatusUnauthorized, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("User: %d:%s with Key: %d:%s is not authorized to %s %s/%s.", user.ID, user.Name, key.ID, key.Name, modeString, ownerName, repoName), }) 
return } @@ -332,27 +315,24 @@ func ServCommand(ctx *context.PrivateContext) { if !repoExist { owner, err := models.GetUserByName(ownerName) if err != nil { - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Unable to get owner: %s %v", results.OwnerName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Unable to get owner: %s %v", results.OwnerName, err), }) return } if owner.IsOrganization() && !setting.Repository.EnablePushCreateOrg { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "results": results, - "type": "ErrForbidden", - "err": "Push to create is not enabled for organizations.", + ctx.JSON(http.StatusForbidden, private.ErrServCommand{ + Results: results, + Err: "Push to create is not enabled for organizations.", }) return } if !owner.IsOrganization() && !setting.Repository.EnablePushCreateUser { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "results": results, - "type": "ErrForbidden", - "err": "Push to create is not enabled for users.", + ctx.JSON(http.StatusForbidden, private.ErrServCommand{ + Results: results, + Err: "Push to create is not enabled for users.", }) return } @@ -360,10 +340,9 @@ func ServCommand(ctx *context.PrivateContext) { repo, err = repo_service.PushCreateRepo(user, owner, results.RepoName) if err != nil { log.Error("pushCreateRepo: %v", err) - ctx.JSON(http.StatusNotFound, map[string]interface{}{ - "results": results, - "type": "ErrRepoNotExist", - "err": fmt.Sprintf("Cannot find repository: %s/%s", results.OwnerName, results.RepoName), + ctx.JSON(http.StatusNotFound, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Cannot find repository: %s/%s", results.OwnerName, results.RepoName), }) return } @@ -374,18 +353,16 @@ func ServCommand(ctx *context.PrivateContext) { // Ensure the wiki is enabled before we allow access to it if _, err := repo.GetUnit(models.UnitTypeWiki); err != nil { if models.IsErrUnitTypeNotExist(err) { - ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "results": results, - "type": "ErrForbidden", - "err": "repository wiki is disabled", + ctx.JSON(http.StatusForbidden, private.ErrServCommand{ + Results: results, + Err: "repository wiki is disabled", }) return } log.Error("Failed to get the wiki unit in %-v Error: %v", repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Failed to get the wiki unit in %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Failed to get the wiki unit in %s/%s Error: %v", ownerName, repoName, err), }) return } @@ -393,10 +370,9 @@ func ServCommand(ctx *context.PrivateContext) { // Finally if we're trying to touch the wiki we should init it if err = wiki_service.InitWiki(repo); err != nil { log.Error("Failed to initialize the wiki in %-v Error: %v", repo, err) - ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ - "results": results, - "type": "InternalServerError", - "err": fmt.Sprintf("Failed to initialize the wiki in %s/%s Error: %v", ownerName, repoName, err), + ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ + Results: results, + Err: fmt.Sprintf("Failed to initialize the wiki in %s/%s Error: %v", ownerName, repoName, err), }) return } diff --git a/routers/private/ssh_log.go 
b/routers/private/ssh_log.go new file mode 100644 index 000000000000..2f1793a0e0f4 --- /dev/null +++ b/routers/private/ssh_log.go @@ -0,0 +1,34 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package private + +import ( + "net/http" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/private" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web" +) + +// SSHLog hook to response ssh log +func SSHLog(ctx *context.PrivateContext) { + if !setting.EnableSSHLog { + ctx.Status(http.StatusOK) + return + } + + opts := web.GetForm(ctx).(*private.SSHLogOption) + + if opts.IsError { + log.Error("ssh: %v", opts.Message) + ctx.Status(http.StatusOK) + return + } + + log.Debug("ssh: %v", opts.Message) + ctx.Status(http.StatusOK) +} diff --git a/routers/utils/utils_test.go b/routers/utils/utils_test.go index 78ab3d20ee4f..bca526331114 100644 --- a/routers/utils/utils_test.go +++ b/routers/utils/utils_test.go @@ -62,7 +62,41 @@ func TestIsExternalURL(t *testing.T) { "//try.gitea.io/test?param=false"), newTest(false, "/hey/hey/hey#3244"), + newTest(true, + "://missing protocol scheme"), } { assert.Equal(t, test.Expected, IsExternalURL(test.RawURL)) } } + +func TestSanitizeFlashErrorString(t *testing.T) { + tests := []struct { + name string + arg string + want string + }{ + { + name: "no error", + arg: "", + want: "", + }, + { + name: "normal error", + arg: "can not open file: \"abc.exe\"", + want: "can not open file: "abc.exe"", + }, + { + name: "line break error", + arg: "some error:\n\nawesome!", + want: "some error:

    awesome!", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := SanitizeFlashErrorString(tt.arg); got != tt.want { + t.Errorf("SanitizeFlashErrorString() = '%v', want '%v'", got, tt.want) + } + }) + } +} diff --git a/routers/admin/admin.go b/routers/web/admin/admin.go similarity index 100% rename from routers/admin/admin.go rename to routers/web/admin/admin.go diff --git a/routers/admin/admin_test.go b/routers/web/admin/admin_test.go similarity index 100% rename from routers/admin/admin_test.go rename to routers/web/admin/admin_test.go diff --git a/routers/admin/auths.go b/routers/web/admin/auths.go similarity index 99% rename from routers/admin/auths.go rename to routers/web/admin/auths.go index a0a20fa023bf..a2f9ab0a5c32 100644 --- a/routers/admin/auths.go +++ b/routers/web/admin/auths.go @@ -240,6 +240,7 @@ func NewAuthSourcePost(ctx *context.Context) { case models.LoginPAM: config = &models.PAMConfig{ ServiceName: form.PAMServiceName, + EmailDomain: form.PAMEmailDomain, } case models.LoginOAuth2: config = parseOAuth2Config(form) @@ -347,6 +348,7 @@ func EditAuthSourcePost(ctx *context.Context) { case models.LoginPAM: config = &models.PAMConfig{ ServiceName: form.PAMServiceName, + EmailDomain: form.PAMEmailDomain, } case models.LoginOAuth2: config = parseOAuth2Config(form) diff --git a/routers/admin/emails.go b/routers/web/admin/emails.go similarity index 96% rename from routers/admin/emails.go rename to routers/web/admin/emails.go index f7e8c97fb6b9..704cb88c640d 100644 --- a/routers/admin/emails.go +++ b/routers/web/admin/emails.go @@ -125,8 +125,8 @@ func ActivateEmail(ctx *context.Context) { log.Info("Changing activation for User ID: %d, email: %s, primary: %v to %v", uid, email, primary, activate) - if err := models.ActivateUserEmail(uid, email, primary, activate); err != nil { - log.Error("ActivateUserEmail(%v,%v,%v,%v): %v", uid, email, primary, activate, err) + if err := models.ActivateUserEmail(uid, email, activate); err != nil { + log.Error("ActivateUserEmail(%v,%v,%v): %v", uid, email, activate, err) if models.IsErrEmailAlreadyUsed(err) { ctx.Flash.Error(ctx.Tr("admin.emails.duplicate_active")) } else { diff --git a/routers/admin/hooks.go b/routers/web/admin/hooks.go similarity index 100% rename from routers/admin/hooks.go rename to routers/web/admin/hooks.go diff --git a/routers/user/setting/main_test.go b/routers/web/admin/main_test.go similarity index 95% rename from routers/user/setting/main_test.go rename to routers/web/admin/main_test.go index d343c02f484b..352907c73717 100644 --- a/routers/user/setting/main_test.go +++ b/routers/web/admin/main_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
-package setting +package admin import ( "path/filepath" diff --git a/routers/admin/notice.go b/routers/web/admin/notice.go similarity index 100% rename from routers/admin/notice.go rename to routers/web/admin/notice.go diff --git a/routers/admin/orgs.go b/routers/web/admin/orgs.go similarity index 84% rename from routers/admin/orgs.go rename to routers/web/admin/orgs.go index 627f56eaecdf..a2b3ed1bcc0f 100644 --- a/routers/admin/orgs.go +++ b/routers/web/admin/orgs.go @@ -11,7 +11,7 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/routers" + "code.gitea.io/gitea/routers/web/explore" ) const ( @@ -24,8 +24,9 @@ func Organizations(ctx *context.Context) { ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminOrganizations"] = true - routers.RenderUserSearch(ctx, &models.SearchUserOptions{ - Type: models.UserTypeOrganization, + explore.RenderUserSearch(ctx, &models.SearchUserOptions{ + Actor: ctx.User, + Type: models.UserTypeOrganization, ListOptions: models.ListOptions{ PageSize: setting.UI.Admin.OrgPagingNum, }, diff --git a/routers/admin/repos.go b/routers/web/admin/repos.go similarity index 94% rename from routers/admin/repos.go rename to routers/web/admin/repos.go index 82b8cc1a7db8..6128992f5a33 100644 --- a/routers/admin/repos.go +++ b/routers/web/admin/repos.go @@ -17,7 +17,7 @@ import ( "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/routers" + "code.gitea.io/gitea/routers/web/explore" repo_service "code.gitea.io/gitea/services/repository" ) @@ -32,7 +32,7 @@ func Repos(ctx *context.Context) { ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminRepositories"] = true - routers.RenderRepoSearch(ctx, &routers.RepoSearchOptions{ + explore.RenderRepoSearch(ctx, &explore.RepoSearchOptions{ Private: true, PageSize: setting.UI.Admin.RepoPagingNum, TplName: tplRepos, @@ -47,6 +47,10 @@ func DeleteRepo(ctx *context.Context) { return } + if ctx.Repo != nil && ctx.Repo.GitRepo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == repo.ID { + ctx.Repo.GitRepo.Close() + } + if err := repo_service.DeleteRepository(ctx.User, repo); err != nil { ctx.ServerError("DeleteRepository", err) return diff --git a/routers/admin/users.go b/routers/web/admin/users.go similarity index 91% rename from routers/admin/users.go rename to routers/web/admin/users.go index 3b29eeefc1af..acccc516bb45 100644 --- a/routers/admin/users.go +++ b/routers/web/admin/users.go @@ -18,8 +18,8 @@ import ( "code.gitea.io/gitea/modules/password" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" - "code.gitea.io/gitea/routers" - router_user_setting "code.gitea.io/gitea/routers/user/setting" + "code.gitea.io/gitea/routers/web/explore" + router_user_setting "code.gitea.io/gitea/routers/web/user/setting" "code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/mailer" ) @@ -36,8 +36,9 @@ func Users(ctx *context.Context) { ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminUsers"] = true - routers.RenderUserSearch(ctx, &models.SearchUserOptions{ - Type: models.UserTypeIndividual, + explore.RenderUserSearch(ctx, &models.SearchUserOptions{ + Actor: ctx.User, + Type: models.UserTypeIndividual, ListOptions: models.ListOptions{ PageSize: setting.UI.Admin.UserPagingNum, }, @@ -50,6 +51,8 @@ func NewUser(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("admin.users.new_account") ctx.Data["PageIsAdmin"] = true 
ctx.Data["PageIsAdminUsers"] = true + ctx.Data["DefaultUserVisibilityMode"] = setting.Service.DefaultUserVisibilityMode + ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice() ctx.Data["login_type"] = "0-0" @@ -70,6 +73,7 @@ func NewUserPost(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("admin.users.new_account") ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminUsers"] = true + ctx.Data["DefaultUserVisibilityMode"] = setting.Service.DefaultUserVisibilityMode sources, err := models.LoginSources() if err != nil { @@ -113,7 +117,7 @@ func NewUserPost(ctx *context.Context) { ctx.RenderWithErr(password.BuildComplexityError(ctx), tplUserNew, &form) return } - pwned, err := password.IsPwned(ctx.Req.Context(), form.Password) + pwned, err := password.IsPwned(ctx, form.Password) if pwned { ctx.Data["Err_Password"] = true errMsg := ctx.Tr("auth.password_pwned") @@ -126,7 +130,8 @@ func NewUserPost(ctx *context.Context) { } u.MustChangePassword = form.MustChangePassword } - if err := models.CreateUser(u); err != nil { + + if err := models.CreateUser(u, &models.CreateUserOverwriteOptions{Visibility: form.Visibility}); err != nil { switch { case models.IsErrUserAlreadyExist(err): ctx.Data["Err_UserName"] = true @@ -200,13 +205,14 @@ func prepareUserInfo(ctx *context.Context) *models.User { return u } -// EditUser show editting user page +// EditUser show editing user page func EditUser(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("admin.users.edit_account") ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminUsers"] = true ctx.Data["DisableRegularOrgCreation"] = setting.Admin.DisableRegularOrgCreation ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations + ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice() prepareUserInfo(ctx) if ctx.Written() { @@ -216,7 +222,7 @@ func EditUser(ctx *context.Context) { ctx.HTML(http.StatusOK, tplUserEdit) } -// EditUserPost response for editting user +// EditUserPost response for editing user func EditUserPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.AdminEditUserForm) ctx.Data["Title"] = ctx.Tr("admin.users.edit_account") @@ -256,7 +262,7 @@ func EditUserPost(ctx *context.Context) { ctx.RenderWithErr(password.BuildComplexityError(ctx), tplUserEdit, &form) return } - pwned, err := password.IsPwned(ctx.Req.Context(), form.Password) + pwned, err := password.IsPwned(ctx, form.Password) if pwned { ctx.Data["Err_Password"] = true errMsg := ctx.Tr("auth.password_pwned") @@ -312,6 +318,8 @@ func EditUserPost(ctx *context.Context) { u.AllowImportLocal = form.AllowImportLocal u.AllowCreateOrganization = form.AllowCreateOrganization + u.Visibility = form.Visibility + // skip self Prohibit Login if ctx.User.ID == u.ID { u.ProhibitLogin = false diff --git a/routers/admin/users_test.go b/routers/web/admin/users_test.go similarity index 58% rename from routers/admin/users_test.go rename to routers/web/admin/users_test.go index b19dcb886bde..3d0b11a77449 100644 --- a/routers/admin/users_test.go +++ b/routers/web/admin/users_test.go @@ -8,6 +8,8 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" @@ -54,7 +56,6 @@ func TestNewUserPost_MustChangePassword(t *testing.T) { } func TestNewUserPost_MustChangePasswordFalse(t *testing.T) { - 
models.PrepareTestEnv(t) ctx := test.MockContext(t, "admin/users/new") @@ -92,7 +93,6 @@ func TestNewUserPost_MustChangePasswordFalse(t *testing.T) { } func TestNewUserPost_InvalidEmail(t *testing.T) { - models.PrepareTestEnv(t) ctx := test.MockContext(t, "admin/users/new") @@ -121,3 +121,80 @@ func TestNewUserPost_InvalidEmail(t *testing.T) { assert.NotEmpty(t, ctx.Flash.ErrorMsg) } + +func TestNewUserPost_VisibilityDefaultPublic(t *testing.T) { + models.PrepareTestEnv(t) + ctx := test.MockContext(t, "admin/users/new") + + u := models.AssertExistsAndLoadBean(t, &models.User{ + IsAdmin: true, + ID: 2, + }).(*models.User) + + ctx.User = u + + username := "gitea" + email := "gitea@gitea.io" + + form := forms.AdminCreateUserForm{ + LoginType: "local", + LoginName: "local", + UserName: username, + Email: email, + Password: "abc123ABC!=$", + SendNotify: false, + MustChangePassword: false, + } + + web.SetForm(ctx, &form) + NewUserPost(ctx) + + assert.NotEmpty(t, ctx.Flash.SuccessMsg) + + u, err := models.GetUserByName(username) + + assert.NoError(t, err) + assert.Equal(t, username, u.Name) + assert.Equal(t, email, u.Email) + // As default user visibility + assert.Equal(t, setting.Service.DefaultUserVisibilityMode, u.Visibility) +} + +func TestNewUserPost_VisibilityPrivate(t *testing.T) { + models.PrepareTestEnv(t) + ctx := test.MockContext(t, "admin/users/new") + + u := models.AssertExistsAndLoadBean(t, &models.User{ + IsAdmin: true, + ID: 2, + }).(*models.User) + + ctx.User = u + + username := "gitea" + email := "gitea@gitea.io" + + form := forms.AdminCreateUserForm{ + LoginType: "local", + LoginName: "local", + UserName: username, + Email: email, + Password: "abc123ABC!=$", + SendNotify: false, + MustChangePassword: false, + Visibility: api.VisibleTypePrivate, + } + + web.SetForm(ctx, &form) + NewUserPost(ctx) + + assert.NotEmpty(t, ctx.Flash.SuccessMsg) + + u, err := models.GetUserByName(username) + + assert.NoError(t, err) + assert.Equal(t, username, u.Name) + assert.Equal(t, email, u.Email) + // As default user visibility + assert.True(t, u.Visibility.IsPrivate()) +} diff --git a/routers/routes/base.go b/routers/web/base.go similarity index 73% rename from routers/routes/base.go rename to routers/web/base.go index 743582d4a56d..f079be51f046 100644 --- a/routers/routes/base.go +++ b/routers/web/base.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
-package routes +package web import ( "errors" @@ -13,9 +13,8 @@ import ( "path" "path/filepath" "strings" - "time" - "code.gitea.io/gitea/modules/auth/sso" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/httpcache" "code.gitea.io/gitea/modules/log" @@ -23,30 +22,11 @@ import ( "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/services/auth" "gitea.com/go-chi/session" ) -// LoggerHandler is a handler that will log the routing to the default gitea log -func LoggerHandler(level log.Level) func(next http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - start := time.Now() - - _ = log.GetLogger("router").Log(0, level, "Started %s %s for %s", log.ColoredMethod(req.Method), req.URL.RequestURI(), req.RemoteAddr) - - next.ServeHTTP(w, req) - - var status int - if v, ok := w.(context.ResponseWriter); ok { - status = v.Status() - } - - _ = log.GetLogger("router").Log(0, level, "Completed %s %s %v %s in %v", log.ColoredMethod(req.Method), req.URL.RequestURI(), log.ColoredStatus(status), log.ColoredStatus(status, http.StatusText(status)), log.ColoredTime(time.Since(start))) - }) - } -} - func storageHandler(storageSetting setting.Storage, prefix string, objStore storage.ObjectStorage) func(next http.Handler) http.Handler { return func(next http.Handler) http.Handler { if storageSetting.ServeDirect { @@ -133,12 +113,10 @@ func storageHandler(storageSetting setting.Storage, prefix string, objStore stor } } -type dataStore struct { - Data map[string]interface{} -} +type dataStore map[string]interface{} func (d *dataStore) GetData() map[string]interface{} { - return d.Data + return *d } // Recovery returns a middleware that recovers from any panics and writes a 500 and a log if so. @@ -164,32 +142,41 @@ func Recovery() func(next http.Handler) http.Handler { var lc = middleware.Locale(w, req) var store = dataStore{ - Data: templates.Vars{ - "Language": lc.Language(), - "CurrentURL": setting.AppSubURL + req.URL.RequestURI(), - "i18n": lc, - }, + "Language": lc.Language(), + "CurrentURL": setting.AppSubURL + req.URL.RequestURI(), + "i18n": lc, } - // Get user from session if logged in. 
- user, _ := sso.SignedInUser(req, w, &store, sessionStore) + var user *models.User + if apiContext := context.GetAPIContext(req); apiContext != nil { + user = apiContext.User + } + if user == nil { + if ctx := context.GetContext(req); ctx != nil { + user = ctx.User + } + } + if user == nil { + // Get user from session if logged in - do not attempt to sign-in + user = auth.SessionUser(sessionStore) + } if user != nil { - store.Data["IsSigned"] = true - store.Data["SignedUser"] = user - store.Data["SignedUserID"] = user.ID - store.Data["SignedUserName"] = user.Name - store.Data["IsAdmin"] = user.IsAdmin + store["IsSigned"] = true + store["SignedUser"] = user + store["SignedUserID"] = user.ID + store["SignedUserName"] = user.Name + store["IsAdmin"] = user.IsAdmin } else { - store.Data["SignedUserID"] = int64(0) - store.Data["SignedUserName"] = "" + store["SignedUserID"] = int64(0) + store["SignedUserName"] = "" } w.Header().Set(`X-Frame-Options`, `SAMEORIGIN`) if !setting.IsProd() { - store.Data["ErrorMsg"] = combinedErr + store["ErrorMsg"] = combinedErr } - err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store.Data)) + err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store)) if err != nil { log.Error("%v", err) } diff --git a/routers/dev/template.go b/routers/web/dev/template.go similarity index 100% rename from routers/dev/template.go rename to routers/web/dev/template.go diff --git a/routers/events/events.go b/routers/web/events/events.go similarity index 98% rename from routers/events/events.go rename to routers/web/events/events.go index 2c1034038fbb..f9cc27485175 100644 --- a/routers/events/events.go +++ b/routers/web/events/events.go @@ -15,7 +15,7 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/routers/user" + "code.gitea.io/gitea/routers/web/user" jsoniter "github.com/json-iterator/go" ) @@ -42,7 +42,7 @@ func Events(ctx *context.Context) { } // Listen to connection close and un-register messageChan - notify := ctx.Req.Context().Done() + notify := ctx.Done() ctx.Resp.Flush() shutdownCtx := graceful.GetManager().ShutdownContext() diff --git a/routers/web/explore/code.go b/routers/web/explore/code.go new file mode 100644 index 000000000000..bf15b93cffd4 --- /dev/null +++ b/routers/web/explore/code.go @@ -0,0 +1,139 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package explore + +import ( + "net/http" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + code_indexer "code.gitea.io/gitea/modules/indexer/code" + "code.gitea.io/gitea/modules/setting" +) + +const ( + // tplExploreCode explore code page template + tplExploreCode base.TplName = "explore/code" +) + +// Code render explore code page +func Code(ctx *context.Context) { + if !setting.Indexer.RepoIndexerEnabled { + ctx.Redirect(setting.AppSubURL+"/explore", 302) + return + } + + ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + ctx.Data["Title"] = ctx.Tr("explore") + ctx.Data["PageIsExplore"] = true + ctx.Data["PageIsExploreCode"] = true + + language := strings.TrimSpace(ctx.Query("l")) + keyword := strings.TrimSpace(ctx.Query("q")) + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + + queryType := strings.TrimSpace(ctx.Query("t")) + isMatch := queryType == "match" + + var ( + repoIDs []int64 + err error + isAdmin bool + ) + if ctx.User != nil { + isAdmin = ctx.User.IsAdmin + } + + // guest user or non-admin user + if ctx.User == nil || !isAdmin { + repoIDs, err = models.FindUserAccessibleRepoIDs(ctx.User) + if err != nil { + ctx.ServerError("SearchResults", err) + return + } + } + + var ( + total int + searchResults []*code_indexer.Result + searchResultLanguages []*code_indexer.SearchResultLanguages + ) + + // if non-admin login user, we need check UnitTypeCode at first + if ctx.User != nil && len(repoIDs) > 0 { + repoMaps, err := models.GetRepositoriesMapByIDs(repoIDs) + if err != nil { + ctx.ServerError("SearchResults", err) + return + } + + var rightRepoMap = make(map[int64]*models.Repository, len(repoMaps)) + repoIDs = make([]int64, 0, len(repoMaps)) + for id, repo := range repoMaps { + if repo.CheckUnitUser(ctx.User, models.UnitTypeCode) { + rightRepoMap[id] = repo + repoIDs = append(repoIDs, id) + } + } + + ctx.Data["RepoMaps"] = rightRepoMap + + total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch) + if err != nil { + ctx.ServerError("SearchResults", err) + return + } + // if non-login user or isAdmin, no need to check UnitTypeCode + } else if (ctx.User == nil && len(repoIDs) > 0) || isAdmin { + total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch) + if err != nil { + ctx.ServerError("SearchResults", err) + return + } + + var loadRepoIDs = make([]int64, 0, len(searchResults)) + for _, result := range searchResults { + var find bool + for _, id := range loadRepoIDs { + if id == result.RepoID { + find = true + break + } + } + if !find { + loadRepoIDs = append(loadRepoIDs, result.RepoID) + } + } + + repoMaps, err := models.GetRepositoriesMapByIDs(loadRepoIDs) + if err != nil { + ctx.ServerError("SearchResults", err) + return + } + + ctx.Data["RepoMaps"] = repoMaps + } + + ctx.Data["Keyword"] = keyword + ctx.Data["Language"] = language + ctx.Data["queryType"] = queryType + ctx.Data["SearchResults"] = searchResults + ctx.Data["SearchResultLanguages"] = searchResultLanguages + ctx.Data["RequireHighlightJS"] = true + ctx.Data["PageIsViewCode"] = true + + pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5) + pager.SetDefaultParams(ctx) + pager.AddParam(ctx, "l", "Language") + ctx.Data["Page"] 
= pager + + ctx.HTML(http.StatusOK, tplExploreCode) +} diff --git a/routers/web/explore/org.go b/routers/web/explore/org.go new file mode 100644 index 000000000000..470e0eb8530b --- /dev/null +++ b/routers/web/explore/org.go @@ -0,0 +1,39 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package explore + +import ( + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" +) + +const ( + // tplExploreOrganizations explore organizations page template + tplExploreOrganizations base.TplName = "explore/organizations" +) + +// Organizations render explore organizations page +func Organizations(ctx *context.Context) { + ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage + ctx.Data["Title"] = ctx.Tr("explore") + ctx.Data["PageIsExplore"] = true + ctx.Data["PageIsExploreOrganizations"] = true + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + + visibleTypes := []structs.VisibleType{structs.VisibleTypePublic} + if ctx.User != nil { + visibleTypes = append(visibleTypes, structs.VisibleTypeLimited, structs.VisibleTypePrivate) + } + + RenderUserSearch(ctx, &models.SearchUserOptions{ + Actor: ctx.User, + Type: models.UserTypeOrganization, + ListOptions: models.ListOptions{PageSize: setting.UI.ExplorePagingNum}, + Visible: visibleTypes, + }, tplExploreOrganizations) +} diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go new file mode 100644 index 000000000000..e9efae5688d7 --- /dev/null +++ b/routers/web/explore/repo.go @@ -0,0 +1,131 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package explore + +import ( + "net/http" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" +) + +const ( + // tplExploreRepos explore repositories page template + tplExploreRepos base.TplName = "explore/repos" +) + +// RepoSearchOptions when calling search repositories +type RepoSearchOptions struct { + OwnerID int64 + Private bool + Restricted bool + PageSize int + TplName base.TplName +} + +// RenderRepoSearch render repositories search page +func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + + var ( + repos []*models.Repository + count int64 + err error + orderBy models.SearchOrderBy + ) + + ctx.Data["SortType"] = ctx.Query("sort") + switch ctx.Query("sort") { + case "newest": + orderBy = models.SearchOrderByNewest + case "oldest": + orderBy = models.SearchOrderByOldest + case "recentupdate": + orderBy = models.SearchOrderByRecentUpdated + case "leastupdate": + orderBy = models.SearchOrderByLeastUpdated + case "reversealphabetically": + orderBy = models.SearchOrderByAlphabeticallyReverse + case "alphabetically": + orderBy = models.SearchOrderByAlphabetically + case "reversesize": + orderBy = models.SearchOrderBySizeReverse + case "size": + orderBy = models.SearchOrderBySize + case "moststars": + orderBy = models.SearchOrderByStarsReverse + case "feweststars": + orderBy = models.SearchOrderByStars + case "mostforks": + orderBy = models.SearchOrderByForksReverse + case "fewestforks": + orderBy = models.SearchOrderByForks + default: + ctx.Data["SortType"] = "recentupdate" + orderBy = models.SearchOrderByRecentUpdated + } + + keyword := strings.Trim(ctx.Query("q"), " ") + topicOnly := ctx.QueryBool("topic") + ctx.Data["TopicOnly"] = topicOnly + + repos, count, err = models.SearchRepository(&models.SearchRepoOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: opts.PageSize, + }, + Actor: ctx.User, + OrderBy: orderBy, + Private: opts.Private, + Keyword: keyword, + OwnerID: opts.OwnerID, + AllPublic: true, + AllLimited: true, + TopicOnly: topicOnly, + IncludeDescription: setting.UI.SearchRepoDescription, + }) + if err != nil { + ctx.ServerError("SearchRepository", err) + return + } + ctx.Data["Keyword"] = keyword + ctx.Data["Total"] = count + ctx.Data["Repos"] = repos + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + + pager := context.NewPagination(int(count), opts.PageSize, page, 5) + pager.SetDefaultParams(ctx) + pager.AddParam(ctx, "topic", "TopicOnly") + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, opts.TplName) +} + +// Repos render explore repositories page +func Repos(ctx *context.Context) { + ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage + ctx.Data["Title"] = ctx.Tr("explore") + ctx.Data["PageIsExplore"] = true + ctx.Data["PageIsExploreRepositories"] = true + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + + var ownerID int64 + if ctx.User != nil && !ctx.User.IsAdmin { + ownerID = ctx.User.ID + } + + RenderRepoSearch(ctx, &RepoSearchOptions{ + PageSize: setting.UI.ExplorePagingNum, + OwnerID: ownerID, + Private: ctx.User != nil, + TplName: tplExploreRepos, + }) +} diff --git a/routers/web/explore/user.go b/routers/web/explore/user.go new file mode 100644 index 000000000000..52f543fe6696 --- /dev/null +++ b/routers/web/explore/user.go @@ -0,0 +1,107 @@ +// Copyright 2021 The Gitea Authors. 
All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package explore + +import ( + "bytes" + "net/http" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" +) + +const ( + // tplExploreUsers explore users page template + tplExploreUsers base.TplName = "explore/users" +) + +var ( + nullByte = []byte{0x00} +) + +func isKeywordValid(keyword string) bool { + return !bytes.Contains([]byte(keyword), nullByte) +} + +// RenderUserSearch render user search page +func RenderUserSearch(ctx *context.Context, opts *models.SearchUserOptions, tplName base.TplName) { + opts.Page = ctx.QueryInt("page") + if opts.Page <= 1 { + opts.Page = 1 + } + + var ( + users []*models.User + count int64 + err error + orderBy models.SearchOrderBy + ) + + ctx.Data["SortType"] = ctx.Query("sort") + switch ctx.Query("sort") { + case "newest": + orderBy = models.SearchOrderByIDReverse + case "oldest": + orderBy = models.SearchOrderByID + case "recentupdate": + orderBy = models.SearchOrderByRecentUpdated + case "leastupdate": + orderBy = models.SearchOrderByLeastUpdated + case "reversealphabetically": + orderBy = models.SearchOrderByAlphabeticallyReverse + case "alphabetically": + orderBy = models.SearchOrderByAlphabetically + default: + ctx.Data["SortType"] = "alphabetically" + orderBy = models.SearchOrderByAlphabetically + } + + opts.Keyword = strings.Trim(ctx.Query("q"), " ") + opts.OrderBy = orderBy + if len(opts.Keyword) == 0 || isKeywordValid(opts.Keyword) { + users, count, err = models.SearchUsers(opts) + if err != nil { + ctx.ServerError("SearchUsers", err) + return + } + } + ctx.Data["Keyword"] = opts.Keyword + ctx.Data["Total"] = count + ctx.Data["Users"] = users + ctx.Data["UsersTwoFaStatus"] = models.UserList(users).GetTwoFaStatus() + ctx.Data["ShowUserEmail"] = setting.UI.ShowUserEmail + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + + pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5) + pager.SetDefaultParams(ctx) + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, tplName) +} + +// Users render explore users page +func Users(ctx *context.Context) { + if setting.Service.Explore.DisableUsersPage { + ctx.Redirect(setting.AppSubURL + "/explore/repos") + return + } + ctx.Data["Title"] = ctx.Tr("explore") + ctx.Data["PageIsExplore"] = true + ctx.Data["PageIsExploreUsers"] = true + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + + RenderUserSearch(ctx, &models.SearchUserOptions{ + Actor: ctx.User, + Type: models.UserTypeIndividual, + ListOptions: models.ListOptions{PageSize: setting.UI.ExplorePagingNum}, + IsActive: util.OptionalBoolTrue, + Visible: []structs.VisibleType{structs.VisibleTypePublic, structs.VisibleTypeLimited, structs.VisibleTypePrivate}, + }, tplExploreUsers) +} diff --git a/routers/web/goget.go b/routers/web/goget.go new file mode 100644 index 000000000000..77934e7f55ef --- /dev/null +++ b/routers/web/goget.go @@ -0,0 +1,86 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package web + +import ( + "net/http" + "net/url" + "path" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + "github.com/unknwon/com" +) + +func goGet(ctx *context.Context) { + if ctx.Req.Method != "GET" || ctx.Query("go-get") != "1" || len(ctx.Req.URL.Query()) > 1 { + return + } + + parts := strings.SplitN(ctx.Req.URL.EscapedPath(), "/", 4) + + if len(parts) < 3 { + return + } + + ownerName := parts[1] + repoName := parts[2] + + // Respond quickly with an appropriate go-get meta and status 200, + // regardless of whether the user has access to the repository + // or whether the repository exists at all. + // In particular, this is a workaround for the "go get" command, which does not + // respect the .netrc file. + + trimmedRepoName := strings.TrimSuffix(repoName, ".git") + + if ownerName == "" || trimmedRepoName == "" { + _, _ = ctx.Write([]byte(` + + + invalid import path + + +`)) + ctx.Status(400) + return + } + branchName := setting.Repository.DefaultBranch + + repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName) + if err == nil && len(repo.DefaultBranch) > 0 { + branchName = repo.DefaultBranch + } + prefix := setting.AppURL + path.Join(url.PathEscape(ownerName), url.PathEscape(repoName), "src", "branch", util.PathEscapeSegments(branchName)) + + appURL, _ := url.Parse(setting.AppURL) + + insecure := "" + if appURL.Scheme == string(setting.HTTP) { + insecure = "--insecure " + } + ctx.Header().Set("Content-Type", "text/html") + ctx.Status(http.StatusOK) + _, _ = ctx.Write([]byte(com.Expand(` + + + + + + go get {Insecure}{GoGetImport} + + +`, map[string]string{ + "GoGetImport": context.ComposeGoGetImport(ownerName, trimmedRepoName), + "CloneLink": models.ComposeHTTPSCloneURL(ownerName, repoName), + "GoDocDirectory": prefix + "{/dir}", + "GoDocFile": prefix + "{/dir}/{file}#L{line}", + "Insecure": insecure, + }))) +} diff --git a/routers/web/home.go b/routers/web/home.go new file mode 100644 index 000000000000..f50197691ffd --- /dev/null +++ b/routers/web/home.go @@ -0,0 +1,65 @@ +// Copyright 2014 The Gogs Authors. All rights reserved. +// Copyright 2019 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file.
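goGet above renders its response through com.Expand from github.com/unknwon/com, which replaces {Key} placeholders with values from the supplied map. A minimal stand-alone sketch; the import path used here is only an illustration:

package main

import (
	"fmt"

	"github.com/unknwon/com"
)

func main() {
	// {Insecure} and {GoGetImport} are expanded from the map, as in goGet
	out := com.Expand(`go get {Insecure}{GoGetImport}`, map[string]string{
		"Insecure":    "--insecure ",
		"GoGetImport": "example.com/owner/repo", // hypothetical import path
	})
	fmt.Println(out) // go get --insecure example.com/owner/repo
}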
+ +package web + +import ( + "net/http" + + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/routers/web/user" +) + +const ( + // tplHome home page template + tplHome base.TplName = "home" +) + +// Home render home page +func Home(ctx *context.Context) { + if ctx.IsSigned { + if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + ctx.Data["Title"] = ctx.Tr("auth.active_your_account") + ctx.HTML(http.StatusOK, user.TplActivate) + } else if !ctx.User.IsActive || ctx.User.ProhibitLogin { + log.Info("Failed authentication attempt for %s from %s", ctx.User.Name, ctx.RemoteAddr()) + ctx.Data["Title"] = ctx.Tr("auth.prohibit_login") + ctx.HTML(http.StatusOK, "user/auth/prohibit_login") + } else if ctx.User.MustChangePassword { + ctx.Data["Title"] = ctx.Tr("auth.must_change_password") + ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password" + middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) + ctx.Redirect(setting.AppSubURL + "/user/settings/change_password") + } else { + user.Dashboard(ctx) + } + return + // Check non-logged users landing page. + } else if setting.LandingPageURL != setting.LandingPageHome { + ctx.Redirect(setting.AppSubURL + string(setting.LandingPageURL)) + return + } + + // Check auto-login. + uname := ctx.GetCookie(setting.CookieUserName) + if len(uname) != 0 { + ctx.Redirect(setting.AppSubURL + "/user/login") + return + } + + ctx.Data["PageIsHome"] = true + ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled + ctx.HTML(http.StatusOK, tplHome) +} + +// NotFound render 404 page +func NotFound(ctx *context.Context) { + ctx.Data["Title"] = "Page Not Found" + ctx.NotFound("home.NotFound", nil) +} diff --git a/routers/metrics.go b/routers/web/metrics.go similarity index 98% rename from routers/metrics.go rename to routers/web/metrics.go index db2fb8de4438..37558ee33764 100644 --- a/routers/metrics.go +++ b/routers/web/metrics.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
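The metrics router moved to routers/web in the next hunk imports crypto/subtle; constant-time comparison is the usual way to check a bearer token without leaking timing information. A generic sketch, not taken from this diff:

package main

import (
	"crypto/subtle"
	"fmt"
)

// tokensEqual reports whether two tokens match, comparing in constant time.
func tokensEqual(got, want string) bool {
	return subtle.ConstantTimeCompare([]byte(got), []byte(want)) == 1
}

func main() {
	fmt.Println(tokensEqual("secret", "secret")) // true
	fmt.Println(tokensEqual("secret", "guess"))  // false
}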
-package routers +package web import ( "crypto/subtle" diff --git a/routers/org/home.go b/routers/web/org/home.go similarity index 96% rename from routers/org/home.go rename to routers/web/org/home.go index d84ae870ab6d..aad0a2a90b33 100644 --- a/routers/org/home.go +++ b/routers/web/org/home.go @@ -30,8 +30,8 @@ func Home(ctx *context.Context) { org := ctx.Org.Organization - if !models.HasOrgVisible(org, ctx.User) { - ctx.NotFound("HasOrgVisible", nil) + if !models.HasOrgOrUserVisible(org, ctx.User) { + ctx.NotFound("HasOrgOrUserVisible", nil) return } @@ -41,6 +41,7 @@ func Home(ctx *context.Context) { desc, err := markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: map[string]string{"mode": "document"}, + GitRepo: ctx.Repo.GitRepo, }, org.Description) if err != nil { ctx.ServerError("RenderString", err) diff --git a/routers/org/members.go b/routers/web/org/members.go similarity index 100% rename from routers/org/members.go rename to routers/web/org/members.go diff --git a/routers/org/org.go b/routers/web/org/org.go similarity index 100% rename from routers/org/org.go rename to routers/web/org/org.go diff --git a/routers/org/org_labels.go b/routers/web/org/org_labels.go similarity index 100% rename from routers/org/org_labels.go rename to routers/web/org/org_labels.go diff --git a/routers/org/setting.go b/routers/web/org/setting.go similarity index 93% rename from routers/org/setting.go rename to routers/web/org/setting.go index e7995fe8fa59..e848939187ce 100644 --- a/routers/org/setting.go +++ b/routers/web/org/setting.go @@ -15,7 +15,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" - userSetting "code.gitea.io/gitea/routers/user/setting" + userSetting "code.gitea.io/gitea/routers/web/user/setting" "code.gitea.io/gitea/services/forms" ) @@ -39,7 +39,7 @@ func Settings(ctx *context.Context) { ctx.HTML(http.StatusOK, tplSettingsOptions) } -// SettingsPost response for settings change submited +// SettingsPost response for settings change submitted func SettingsPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.UpdateOrgSettingForm) ctx.Data["Title"] = ctx.Tr("org.settings") @@ -52,6 +52,7 @@ func SettingsPost(ctx *context.Context) { } org := ctx.Org.Organization + nameChanged := org.Name != form.Name // Check if organization name has been changed. if org.LowerName != strings.ToLower(form.Name) { @@ -75,7 +76,9 @@ func SettingsPost(ctx *context.Context) { // reset ctx.org.OrgLink with new name ctx.Org.OrgLink = setting.AppSubURL + "/org/" + form.Name log.Trace("Organization name changed: %s -> %s", org.Name, form.Name) + nameChanged = false } + // In case it's just a case change. 
org.Name = form.Name org.LowerName = strings.ToLower(form.Name) @@ -105,11 +108,17 @@ func SettingsPost(ctx *context.Context) { return } for _, repo := range org.Repos { + repo.OwnerName = org.Name if err := models.UpdateRepository(repo, true); err != nil { ctx.ServerError("UpdateRepository", err) return } } + } else if nameChanged { + if err := models.UpdateRepositoryOwnerNames(org.ID, org.Name); err != nil { + ctx.ServerError("UpdateRepository", err) + return + } } log.Trace("Organization setting updated: %s", org.Name) @@ -130,7 +139,7 @@ func SettingsAvatar(ctx *context.Context) { ctx.Redirect(ctx.Org.OrgLink + "/settings") } -// SettingsDeleteAvatar response for delete avatar on setings page +// SettingsDeleteAvatar response for delete avatar on settings page func SettingsDeleteAvatar(ctx *context.Context) { if err := ctx.Org.Organization.DeleteAvatar(); err != nil { ctx.Flash.Error(err.Error()) diff --git a/routers/org/teams.go b/routers/web/org/teams.go similarity index 100% rename from routers/org/teams.go rename to routers/web/org/teams.go diff --git a/routers/repo/activity.go b/routers/web/repo/activity.go similarity index 100% rename from routers/repo/activity.go rename to routers/web/repo/activity.go diff --git a/routers/repo/attachment.go b/routers/web/repo/attachment.go similarity index 97% rename from routers/repo/attachment.go rename to routers/web/repo/attachment.go index f53e7450ae02..5becbea2713a 100644 --- a/routers/repo/attachment.go +++ b/routers/web/repo/attachment.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/upload" + "code.gitea.io/gitea/routers/common" ) // UploadIssueAttachment response for Issue/PR attachments @@ -152,7 +153,7 @@ func GetAttachment(ctx *context.Context) { } defer fr.Close() - if err = ServeData(ctx, attach.Name, attach.Size, fr); err != nil { + if err = common.ServeData(ctx, attach.Name, attach.Size, fr); err != nil { ctx.ServerError("ServeData", err) return } diff --git a/routers/repo/blame.go b/routers/web/repo/blame.go similarity index 56% rename from routers/repo/blame.go rename to routers/web/repo/blame.go index f5b228bdfe13..4ade9e9a93a5 100644 --- a/routers/repo/blame.go +++ b/routers/web/repo/blame.go @@ -5,7 +5,6 @@ package repo import ( - "bytes" "container/list" "fmt" "html" @@ -18,7 +17,6 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/highlight" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/timeutil" ) @@ -27,6 +25,20 @@ const ( tplBlame base.TplName = "repo/home" ) +type blameRow struct { + RowNumber int + Avatar gotemplate.HTML + RepoLink string + PartSha string + PreviousSha string + PreviousShaURL string + IsFirstCommit bool + CommitURL string + CommitMessage string + CommitSince gotemplate.HTML + Code gotemplate.HTML +} + // RefBlame render blame page func RefBlame(ctx *context.Context) { fileName := ctx.Repo.TreePath @@ -39,19 +51,6 @@ func RefBlame(ctx *context.Context) { repoName := ctx.Repo.Repository.Name commitID := ctx.Repo.CommitID - commit, err := ctx.Repo.GitRepo.GetCommit(commitID) - if err != nil { - if git.IsErrNotExist(err) { - ctx.NotFound("Repo.GitRepo.GetCommit", err) - } else { - ctx.ServerError("Repo.GitRepo.GetCommit", err) - } - return - } - if len(commitID) != 40 { - commitID = commit.ID.String() - } - branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL() treeLink := branchLink 
rawLink := ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL() @@ -74,25 +73,6 @@ func RefBlame(ctx *context.Context) { } } - // Show latest commit info of repository in table header, - // or of directory if not in root directory. - latestCommit := ctx.Repo.Commit - if len(ctx.Repo.TreePath) > 0 { - latestCommit, err = ctx.Repo.Commit.GetCommitByPath(ctx.Repo.TreePath) - if err != nil { - ctx.ServerError("GetCommitByPath", err) - return - } - } - ctx.Data["LatestCommit"] = latestCommit - ctx.Data["LatestCommitVerification"] = models.ParseCommitWithSignature(latestCommit) - ctx.Data["LatestCommitUser"] = models.ValidateCommitWithEmail(latestCommit) - - statuses, err := models.GetLatestCommitStatus(ctx.Repo.Repository.ID, ctx.Repo.Commit.ID.String(), models.ListOptions{}) - if err != nil { - log.Error("GetLatestCommitStatus: %v", err) - } - // Get current entry user currently looking at. entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath) if err != nil { @@ -102,9 +82,6 @@ func RefBlame(ctx *context.Context) { blob := entry.Blob() - ctx.Data["LatestCommitStatus"] = models.CalcCommitStatus(statuses) - ctx.Data["LatestCommitStatuses"] = statuses - ctx.Data["Paths"] = paths ctx.Data["TreeLink"] = treeLink ctx.Data["TreeNames"] = treeNames @@ -124,7 +101,7 @@ func RefBlame(ctx *context.Context) { return } - blameReader, err := git.CreateBlameReader(ctx.Req.Context(), models.RepoPath(userName, repoName), commitID, fileName) + blameReader, err := git.CreateBlameReader(ctx, models.RepoPath(userName, repoName), commitID, fileName) if err != nil { ctx.NotFound("CreateBlameReader", err) return @@ -145,8 +122,33 @@ func RefBlame(ctx *context.Context) { blameParts = append(blameParts, *blamePart) } + // Get Topics of this repo + renderRepoTopics(ctx) + if ctx.Written() { + return + } + + commitNames, previousCommits := processBlameParts(ctx, blameParts) + if ctx.Written() { + return + } + + renderBlame(ctx, blameParts, commitNames, previousCommits) + + ctx.HTML(http.StatusOK, tplBlame) +} + +func processBlameParts(ctx *context.Context, blameParts []git.BlamePart) (map[string]models.UserCommit, map[string]string) { + // store commit data by SHA to look up avatar info etc commitNames := make(map[string]models.UserCommit) + // previousCommits contains links from SHA to parent SHA, + // if parent also contains the current TreePath. 
+ previousCommits := make(map[string]string) + // and as blameParts can reference the same commits multiple + // times, we cache the lookup work locally commits := list.New() + commitCache := map[string]*git.Commit{} + commitCache[ctx.Repo.Commit.ID.String()] = ctx.Repo.Commit for _, part := range blameParts { sha := part.Sha @@ -154,14 +156,38 @@ func RefBlame(ctx *context.Context) { continue } - commit, err := ctx.Repo.GitRepo.GetCommit(sha) - if err != nil { - if git.IsErrNotExist(err) { - ctx.NotFound("Repo.GitRepo.GetCommit", err) - } else { - ctx.ServerError("Repo.GitRepo.GetCommit", err) + // find the blamePart commit, to look up parent & email address for avatars + commit, ok := commitCache[sha] + var err error + if !ok { + commit, err = ctx.Repo.GitRepo.GetCommit(sha) + if err != nil { + if git.IsErrNotExist(err) { + ctx.NotFound("Repo.GitRepo.GetCommit", err) + } else { + ctx.ServerError("Repo.GitRepo.GetCommit", err) + } + return nil, nil + } + commitCache[sha] = commit + } + + // find parent commit + if commit.ParentCount() > 0 { + psha := commit.Parents[0] + previousCommit, ok := commitCache[psha.String()] + if !ok { + previousCommit, _ = commit.Parent(0) + if previousCommit != nil { + commitCache[psha.String()] = previousCommit + } + } + // only store parent commit ONCE, if it has the file + if previousCommit != nil { + if haz1, _ := previousCommit.HasFile(ctx.Repo.TreePath); haz1 { + previousCommits[commit.ID.String()] = previousCommit.ID.String() + } } - return } commits.PushBack(commit) @@ -169,46 +195,39 @@ func RefBlame(ctx *context.Context) { commitNames[commit.ID.String()] = models.UserCommit{} } + // populate commit email addresses to later look up avatars. commits = models.ValidateCommitsWithEmails(commits) - for e := commits.Front(); e != nil; e = e.Next() { c := e.Value.(models.UserCommit) - commitNames[c.ID.String()] = c } - // Get Topics of this repo - renderRepoTopics(ctx) - if ctx.Written() { - return - } - - renderBlame(ctx, blameParts, commitNames) - - ctx.HTML(http.StatusOK, tplBlame) + return commitNames, previousCommits } -func renderBlame(ctx *context.Context, blameParts []git.BlamePart, commitNames map[string]models.UserCommit) { +func renderBlame(ctx *context.Context, blameParts []git.BlamePart, commitNames map[string]models.UserCommit, previousCommits map[string]string) { repoLink := ctx.Repo.RepoLink var lines = make([]string, 0) - - var commitInfo bytes.Buffer - var lineNumbers bytes.Buffer - var codeLines bytes.Buffer + rows := make([]*blameRow, 0) var i = 0 - for pi, part := range blameParts { + var commitCnt = 0 + for _, part := range blameParts { for index, line := range part.Lines { i++ lines = append(lines, line) - var attr = "" - if len(part.Lines)-1 == index && len(blameParts)-1 != pi { - attr = " bottom-line" + br := &blameRow{ + RowNumber: i, } + commit := commitNames[part.Sha] + previousSha := previousCommits[part.Sha] if index == 0 { + // Count commit number + commitCnt++ + // User avatar image commitSince := timeutil.TimeSinceUnix(timeutil.TimeStamp(commit.Author.When.Unix()), ctx.Data["Lang"].(string)) @@ -219,16 +238,14 @@ func renderBlame(ctx *context.Context, blameParts []git.BlamePart, commitNames m avatar = string(templates.AvatarByEmail(commit.Author.Email, commit.Author.Name, 18, "mr-3")) } - commitInfo.WriteString(fmt.Sprintf(`
    %s
    %s
    `, attr, avatar, repoLink, part.Sha, html.EscapeString(commit.CommitMessage), commitSince)) - } else { - commitInfo.WriteString(fmt.Sprintf(`
    `, attr)) - } - - //Line number - if len(part.Lines)-1 == index && len(blameParts)-1 != pi { - lineNumbers.WriteString(fmt.Sprintf(``, i, i)) - } else { - lineNumbers.WriteString(fmt.Sprintf(``, i, i)) + br.Avatar = gotemplate.HTML(avatar) + br.RepoLink = repoLink + br.PartSha = part.Sha + br.PreviousSha = previousSha + br.PreviousShaURL = fmt.Sprintf("%s/blame/commit/%s/%s", repoLink, previousSha, ctx.Repo.TreePath) + br.CommitURL = fmt.Sprintf("%s/commit/%s", repoLink, part.Sha) + br.CommitMessage = html.EscapeString(commit.CommitMessage) + br.CommitSince = commitSince } if i != len(lines)-1 { @@ -236,16 +253,12 @@ func renderBlame(ctx *context.Context, blameParts []git.BlamePart, commitNames m } fileName := fmt.Sprintf("%v", ctx.Data["FileName"]) line = highlight.Code(fileName, line) - line = `` + line + `` - if len(part.Lines)-1 == index && len(blameParts)-1 != pi { - codeLines.WriteString(fmt.Sprintf(`
%s
`, i, i, line)) - } else { - codeLines.WriteString(fmt.Sprintf(`
%s
  • `, i, i, line)) - } + + br.Code = gotemplate.HTML(line) + rows = append(rows, br) } } - ctx.Data["BlameContent"] = gotemplate.HTML(codeLines.String()) - ctx.Data["BlameCommitInfo"] = gotemplate.HTML(commitInfo.String()) - ctx.Data["BlameLineNums"] = gotemplate.HTML(lineNumbers.String()) + ctx.Data["BlameRows"] = rows + ctx.Data["CommitCnt"] = commitCnt } diff --git a/routers/repo/branch.go b/routers/web/repo/branch.go similarity index 84% rename from routers/repo/branch.go rename to routers/web/repo/branch.go index eecaa888210c..da72940144fe 100644 --- a/routers/repo/branch.go +++ b/routers/web/repo/branch.go @@ -6,6 +6,7 @@ package repo import ( + "errors" "fmt" "net/http" "strings" @@ -17,6 +18,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/repofiles" repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/utils" @@ -61,8 +63,8 @@ func Branches(ctx *context.Context) { } limit := ctx.QueryInt("limit") - if limit <= 0 || limit > git.BranchesRangeSize { - limit = git.BranchesRangeSize + if limit <= 0 || limit > setting.Git.BranchesRangeSize { + limit = setting.Git.BranchesRangeSize } skip := (page - 1) * limit @@ -72,7 +74,7 @@ func Branches(ctx *context.Context) { return } ctx.Data["Branches"] = branches - pager := context.NewPagination(int(branchesCount), git.BranchesRangeSize, page, 5) + pager := context.NewPagination(int(branchesCount), setting.Git.BranchesRangeSize, page, 5) pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager @@ -83,34 +85,23 @@ func Branches(ctx *context.Context) { func DeleteBranchPost(ctx *context.Context) { defer redirect(ctx) branchName := ctx.Query("name") - if branchName == ctx.Repo.Repository.DefaultBranch { - log.Debug("DeleteBranch: Can't delete default branch '%s'", branchName) - ctx.Flash.Error(ctx.Tr("repo.branch.default_deletion_failed", branchName)) - return - } - - isProtected, err := ctx.Repo.Repository.IsProtectedBranch(branchName, ctx.User) - if err != nil { - log.Error("DeleteBranch: %v", err) - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName)) - return - } - - if isProtected { - log.Debug("DeleteBranch: Can't delete protected branch '%s'", branchName) - ctx.Flash.Error(ctx.Tr("repo.branch.protected_deletion_failed", branchName)) - return - } - if !ctx.Repo.GitRepo.IsBranchExist(branchName) { - log.Debug("DeleteBranch: Can't delete non existing branch '%s'", branchName) - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName)) - return - } + if err := repo_service.DeleteBranch(ctx.User, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { + switch { + case git.IsErrBranchNotExist(err): + log.Debug("DeleteBranch: Can't delete non existing branch '%s'", branchName) + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName)) + case errors.Is(err, repo_service.ErrBranchIsDefault): + log.Debug("DeleteBranch: Can't delete default branch '%s'", branchName) + ctx.Flash.Error(ctx.Tr("repo.branch.default_deletion_failed", branchName)) + case errors.Is(err, repo_service.ErrBranchIsProtected): + log.Debug("DeleteBranch: Can't delete protected branch '%s'", branchName) + ctx.Flash.Error(ctx.Tr("repo.branch.protected_deletion_failed", branchName)) + default: + log.Error("DeleteBranch: %v", err) + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName)) + } - if err := deleteBranch(ctx, branchName); err != nil { - log.Error("DeleteBranch: 
%v", err) - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName)) return } @@ -169,41 +160,6 @@ func redirect(ctx *context.Context) { }) } -func deleteBranch(ctx *context.Context, branchName string) error { - commit, err := ctx.Repo.GitRepo.GetBranchCommit(branchName) - if err != nil { - log.Error("GetBranchCommit: %v", err) - return err - } - - if err := ctx.Repo.GitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{ - Force: true, - }); err != nil { - log.Error("DeleteBranch: %v", err) - return err - } - - // Don't return error below this - if err := repo_service.PushUpdate( - &repo_module.PushUpdateOptions{ - RefFullName: git.BranchPrefix + branchName, - OldCommitID: commit.ID.String(), - NewCommitID: git.EmptySHA, - PusherID: ctx.User.ID, - PusherName: ctx.User.Name, - RepoUserName: ctx.Repo.Owner.Name, - RepoName: ctx.Repo.Repository.Name, - }); err != nil { - log.Error("Update: %v", err) - } - - if err := ctx.Repo.Repository.AddDeletedBranch(branchName, commit.ID.String(), ctx.User.ID); err != nil { - log.Warn("AddDeletedBranch: %v", err) - } - - return nil -} - // loadBranches loads branches from the repository limited by page & pageSize. // NOTE: May write to context on error. func loadBranches(ctx *context.Context, skip, limit int) ([]*Branch, int) { diff --git a/routers/repo/commit.go b/routers/web/repo/commit.go similarity index 97% rename from routers/repo/commit.go rename to routers/web/repo/commit.go index c4719526376f..45ef22f498be 100644 --- a/routers/repo/commit.go +++ b/routers/web/repo/commit.go @@ -63,7 +63,7 @@ func Commits(ctx *context.Context) { pageSize := ctx.QueryInt("limit") if pageSize <= 0 { - pageSize = git.CommitsRangeSize + pageSize = setting.Git.CommitsRangeSize } // Both `git log branchName` and `git log commitId` work. 
@@ -82,7 +82,7 @@ func Commits(ctx *context.Context) { ctx.Data["CommitCount"] = commitsCount ctx.Data["Branch"] = ctx.Repo.BranchName - pager := context.NewPagination(int(commitsCount), git.CommitsRangeSize, page, 5) + pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5) pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager @@ -250,7 +250,7 @@ func FileHistory(ctx *context.Context) { ctx.Data["CommitCount"] = commitsCount ctx.Data["Branch"] = branchName - pager := context.NewPagination(int(commitsCount), git.CommitsRangeSize, page, 5) + pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5) pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager @@ -355,7 +355,7 @@ func Diff(ctx *context.Context) { } note := &git.Note{} - err = git.GetNote(ctx.Repo.GitRepo, commitID, note) + err = git.GetNote(ctx, ctx.Repo.GitRepo, commitID, note) if err == nil { ctx.Data["Note"] = string(charset.ToUTF8WithFallback(note.Message)) ctx.Data["NoteCommit"] = note.Commit diff --git a/routers/repo/compare.go b/routers/web/repo/compare.go similarity index 93% rename from routers/repo/compare.go rename to routers/web/repo/compare.go index a658374d9b12..fddfc4a63a89 100644 --- a/routers/repo/compare.go +++ b/routers/web/repo/compare.go @@ -37,8 +37,20 @@ func setCompareContext(ctx *context.Context, base *git.Commit, head *git.Commit, ctx.Data["BaseCommit"] = base ctx.Data["HeadCommit"] = head + ctx.Data["GetBlobByPathForCommit"] = func(commit *git.Commit, path string) *git.Blob { + if commit == nil { + return nil + } + + blob, err := commit.GetBlobByPath(path) + if err != nil { + return nil + } + return blob + } + setPathsCompareContext(ctx, base, head, headTarget) - setImageCompareContext(ctx, base, head) + setImageCompareContext(ctx) setCsvCompareContext(ctx) } @@ -57,27 +69,18 @@ func setPathsCompareContext(ctx *context.Context, base *git.Commit, head *git.Co } // setImageCompareContext sets context data that is required by image compare template -func setImageCompareContext(ctx *context.Context, base *git.Commit, head *git.Commit) { - ctx.Data["IsImageFileInHead"] = head.IsImageFile - ctx.Data["IsImageFileInBase"] = base.IsImageFile - ctx.Data["ImageInfoBase"] = func(name string) *git.ImageMetaData { - if base == nil { - return nil +func setImageCompareContext(ctx *context.Context) { + ctx.Data["IsBlobAnImage"] = func(blob *git.Blob) bool { + if blob == nil { + return false } - result, err := base.ImageInfo(name) + + st, err := blob.GuessContentType() if err != nil { - log.Error("ImageInfo failed: %v", err) - return nil + log.Error("GuessContentType failed: %v", err) + return false } - return result - } - ctx.Data["ImageInfo"] = func(name string) *git.ImageMetaData { - result, err := head.ImageInfo(name) - if err != nil { - log.Error("ImageInfo failed: %v", err) - return nil - } - return result + return st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()) } } @@ -391,7 +394,7 @@ func ParseCompareInfo(ctx *context.Context) (*models.User, *models.Repository, * if rootRepo != nil && rootRepo.ID != headRepo.ID && rootRepo.ID != baseRepo.ID { - perm, branches, err := getBranchesForRepo(ctx.User, rootRepo) + perm, branches, tags, err := getBranchesAndTagsForRepo(ctx.User, rootRepo) if err != nil { ctx.ServerError("GetBranchesForRepo", err) return nil, nil, nil, nil, "", "" @@ -399,19 +402,20 @@ func ParseCompareInfo(ctx *context.Context) (*models.User, *models.Repository, * if perm { ctx.Data["RootRepo"] = rootRepo ctx.Data["RootRepoBranches"] 
= branches + ctx.Data["RootRepoTags"] = tags } } // If we have a ownForkRepo and it's different from: // 1. The computed base - // 2. The computed hea + // 2. The computed head // 3. The rootRepo (if we have one) // then get the branches from it. if ownForkRepo != nil && ownForkRepo.ID != headRepo.ID && ownForkRepo.ID != baseRepo.ID && (rootRepo == nil || ownForkRepo.ID != rootRepo.ID) { - perm, branches, err := getBranchesForRepo(ctx.User, ownForkRepo) + perm, branches, tags, err := getBranchesAndTagsForRepo(ctx.User, ownForkRepo) if err != nil { ctx.ServerError("GetBranchesForRepo", err) return nil, nil, nil, nil, "", "" @@ -419,6 +423,7 @@ func ParseCompareInfo(ctx *context.Context) (*models.User, *models.Repository, * if perm { ctx.Data["OwnForkRepo"] = ownForkRepo ctx.Data["OwnForkRepoBranches"] = branches + ctx.Data["OwnForkRepoTags"] = tags } } @@ -572,25 +577,29 @@ func PrepareCompareDiff( return false } -func getBranchesForRepo(user *models.User, repo *models.Repository) (bool, []string, error) { +func getBranchesAndTagsForRepo(user *models.User, repo *models.Repository) (bool, []string, []string, error) { perm, err := models.GetUserRepoPermission(repo, user) if err != nil { - return false, nil, err + return false, nil, nil, err } if !perm.CanRead(models.UnitTypeCode) { - return false, nil, nil + return false, nil, nil, nil } gitRepo, err := git.OpenRepository(repo.RepoPath()) if err != nil { - return false, nil, err + return false, nil, nil, err } defer gitRepo.Close() branches, _, err := gitRepo.GetBranches(0, 0) if err != nil { - return false, nil, err + return false, nil, nil, err + } + tags, err := gitRepo.GetTags() + if err != nil { + return false, nil, nil, err } - return true, branches, nil + return true, branches, tags, nil } // CompareDiff show different from one commit to another commit @@ -608,14 +617,29 @@ func CompareDiff(ctx *context.Context) { return } - if ctx.Data["PageIsComparePull"] == true { - headBranches, _, err := headGitRepo.GetBranches(0, 0) - if err != nil { - ctx.ServerError("GetBranches", err) - return - } - ctx.Data["HeadBranches"] = headBranches + baseGitRepo := ctx.Repo.GitRepo + baseTags, err := baseGitRepo.GetTags() + if err != nil { + ctx.ServerError("GetTags", err) + return + } + ctx.Data["Tags"] = baseTags + headBranches, _, err := headGitRepo.GetBranches(0, 0) + if err != nil { + ctx.ServerError("GetBranches", err) + return + } + ctx.Data["HeadBranches"] = headBranches + + headTags, err := headGitRepo.GetTags() + if err != nil { + ctx.ServerError("GetTags", err) + return + } + ctx.Data["HeadTags"] = headTags + + if ctx.Data["PageIsComparePull"] == true { pr, err := models.GetUnmergedPullRequest(headRepo.ID, ctx.Repo.Repository.ID, headBranch, baseBranch) if err != nil { if !models.IsErrPullRequestNotExist(err) { @@ -690,7 +714,11 @@ func ExcerptBlob(ctx *context.Context) { lastLeft += chunkSize lastRight += chunkSize } else { - section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, idxRight-lastRight-1) + offset := -1 + if direction == "down" { + offset = 0 + } + section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, idxRight-lastRight+offset) leftHunkSize = 0 rightHunkSize = 0 idxLeft = lastLeft diff --git a/routers/repo/download.go b/routers/web/repo/download.go similarity index 53% rename from routers/repo/download.go rename to routers/web/repo/download.go index 1eedec8cb177..6f43d4b83919 100644 --- a/routers/repo/download.go +++ b/routers/web/repo/download.go @@ -6,90 +6,14 @@ package repo import ( 
- "fmt" - "io" - "path" - "strings" - - "code.gitea.io/gitea/modules/base" - "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/httpcache" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/routers/common" ) -// ServeData download file from io.Reader -func ServeData(ctx *context.Context, name string, size int64, reader io.Reader) error { - buf := make([]byte, 1024) - n, err := reader.Read(buf) - if err != nil && err != io.EOF { - return err - } - if n >= 0 { - buf = buf[:n] - } - - ctx.Resp.Header().Set("Cache-Control", "public,max-age=86400") - - if size >= 0 { - ctx.Resp.Header().Set("Content-Length", fmt.Sprintf("%d", size)) - } else { - log.Error("ServeData called to serve data: %s with size < 0: %d", name, size) - } - name = path.Base(name) - - // Google Chrome dislike commas in filenames, so let's change it to a space - name = strings.ReplaceAll(name, ",", " ") - - if base.IsTextFile(buf) || ctx.QueryBool("render") { - cs, err := charset.DetectEncoding(buf) - if err != nil { - log.Error("Detect raw file %s charset failed: %v, using by default utf-8", name, err) - cs = "utf-8" - } - ctx.Resp.Header().Set("Content-Type", "text/plain; charset="+strings.ToLower(cs)) - } else if base.IsImageFile(buf) || base.IsPDFFile(buf) { - ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`inline; filename="%s"`, name)) - ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition") - if base.IsSVGImageFile(buf) { - ctx.Resp.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox") - ctx.Resp.Header().Set("X-Content-Type-Options", "nosniff") - ctx.Resp.Header().Set("Content-Type", base.SVGMimeType) - } - } else { - ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, name)) - ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition") - } - - _, err = ctx.Resp.Write(buf) - if err != nil { - return err - } - _, err = io.Copy(ctx.Resp, reader) - return err -} - -// ServeBlob download a git.Blob -func ServeBlob(ctx *context.Context, blob *git.Blob) error { - if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`) { - return nil - } - - dataRc, err := blob.DataAsync() - if err != nil { - return err - } - defer func() { - if err = dataRc.Close(); err != nil { - log.Error("ServeBlob: Close: %v", err) - } - }() - - return ServeData(ctx, ctx.Repo.TreePath, blob.Size(), dataRc) -} - // ServeBlobOrLFS download a git.Blob redirecting to LFS if necessary func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error { if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`) { @@ -100,7 +24,11 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error { if err != nil { return err } + closed := false defer func() { + if closed { + return + } if err = dataRc.Close(); err != nil { log.Error("ServeBlobOrLFS: Close: %v", err) } @@ -110,7 +38,11 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error { if pointer.IsValid() { meta, _ := ctx.Repo.Repository.GetLFSMetaObjectByOid(pointer.Oid) if meta == nil { - return ServeBlob(ctx, blob) + if err = dataRc.Close(); err != nil { + log.Error("ServeBlobOrLFS: Close: %v", err) + } + closed = true + return common.ServeBlob(ctx, blob) } if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+pointer.Oid+`"`) { return nil @@ -124,10 +56,14 @@ func ServeBlobOrLFS(ctx 
*context.Context, blob *git.Blob) error { log.Error("ServeBlobOrLFS: Close: %v", err) } }() - return ServeData(ctx, ctx.Repo.TreePath, meta.Size, lfsDataRc) + return common.ServeData(ctx, ctx.Repo.TreePath, meta.Size, lfsDataRc) + } + if err = dataRc.Close(); err != nil { + log.Error("ServeBlobOrLFS: Close: %v", err) } + closed = true - return ServeBlob(ctx, blob) + return common.ServeBlob(ctx, blob) } // SingleDownload download a file by repos path @@ -141,7 +77,7 @@ func SingleDownload(ctx *context.Context) { } return } - if err = ServeBlob(ctx, blob); err != nil { + if err = common.ServeBlob(ctx, blob); err != nil { ctx.ServerError("ServeBlob", err) } } @@ -173,7 +109,7 @@ func DownloadByID(ctx *context.Context) { } return } - if err = ServeBlob(ctx, blob); err != nil { + if err = common.ServeBlob(ctx, blob); err != nil { ctx.ServerError("ServeBlob", err) } } diff --git a/routers/repo/editor.go b/routers/web/repo/editor.go similarity index 99% rename from routers/repo/editor.go rename to routers/web/repo/editor.go index 2cc5c1e7f2fa..0f978c7b01c5 100644 --- a/routers/repo/editor.go +++ b/routers/web/repo/editor.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/repofiles" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/upload" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -106,6 +107,7 @@ func editFile(ctx *context.Context, isNewFile bool) { ctx.NotFound("blob.Data", err) return } + defer dataRc.Close() ctx.Data["FileSize"] = blob.Size() @@ -116,12 +118,16 @@ func editFile(ctx *context.Context, isNewFile bool) { buf = buf[:n] // Only some file types are editable online as text. - if !base.IsRepresentableAsText(buf) { - ctx.NotFound("base.IsRepresentableAsText", nil) + if !typesniffer.DetectContentType(buf).IsRepresentableAsText() { + ctx.NotFound("typesniffer.IsRepresentableAsText", nil) return } d, _ := ioutil.ReadAll(dataRc) + if err := dataRc.Close(); err != nil { + log.Error("Error whilst closing blob data: %v", err) + } + buf = append(buf, d...) 
if content, err := charset.ToUTF8WithErr(buf); err != nil { log.Error("ToUTF8WithErr: %v", err) diff --git a/routers/repo/editor_test.go b/routers/web/repo/editor_test.go similarity index 100% rename from routers/repo/editor_test.go rename to routers/web/repo/editor_test.go diff --git a/routers/repo/http.go b/routers/web/repo/http.go similarity index 79% rename from routers/repo/http.go rename to routers/web/repo/http.go index 95a56e4a2c22..3390f026a0c1 100644 --- a/routers/repo/http.go +++ b/routers/web/repo/http.go @@ -22,20 +22,17 @@ import ( "time" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/auth/sso" - "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" repo_service "code.gitea.io/gitea/services/repository" ) -// httpBase implmentation git smart HTTP protocol +// httpBase implementation git smart HTTP protocol func httpBase(ctx *context.Context) (h *serviceHandler) { if setting.Repository.DisableHTTPGit { ctx.Resp.WriteHeader(http.StatusForbidden) @@ -117,7 +114,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { if redirectUserID, err := models.LookupUserRedirect(username); err == nil { context.RedirectToUser(ctx, username, redirectUserID) } else { - ctx.NotFound("GetUserByName", err) + ctx.NotFound(fmt.Sprintf("User %s does not exist", username), nil) } } else { ctx.ServerError("GetUserByName", err) @@ -153,11 +150,8 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { // Only public pull don't need auth. isPublicPull := repoExist && !repo.IsPrivate && isPull var ( - askAuth = !isPublicPull || setting.Service.RequireSignInView - authUser *models.User - authUsername string - authPasswd string - environ []string + askAuth = !isPublicPull || setting.Service.RequireSignInView + environ []string ) // don't allow anonymous pulls if organization is not public @@ -172,108 +166,33 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { // check access if askAuth { - authUsername = ctx.Req.Header.Get(setting.ReverseProxyAuthUser) - if setting.Service.EnableReverseProxyAuth && len(authUsername) > 0 { - authUser, err = models.GetUserByName(authUsername) - if err != nil { - ctx.HandleText(401, "reverse proxy login error, got error while running GetUserByName") - return - } - } else { - authHead := ctx.Req.Header.Get("Authorization") - if len(authHead) == 0 { - ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=\".\"") - ctx.Error(http.StatusUnauthorized) - return - } + // rely on the results of Contexter + if !ctx.IsSigned { + // TODO: support digit auth - which would be Authorization header with digit + ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=\".\"") + ctx.Error(http.StatusUnauthorized) + return + } - auths := strings.Fields(authHead) - // currently check basic auth - // TODO: support digit auth - // FIXME: middlewares/context.go did basic auth check already, - // maybe could use that one. 
- if len(auths) != 2 || auths[0] != "Basic" { - ctx.HandleText(http.StatusUnauthorized, "no basic auth and digit auth") + if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true { + _, err = models.GetTwoFactorByUID(ctx.User.ID) + if err == nil { + // TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented + ctx.HandleText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. Please create and use a personal access token on the user settings page") return - } - authUsername, authPasswd, err = base.BasicAuthDecode(auths[1]) - if err != nil { - ctx.HandleText(http.StatusUnauthorized, "no basic auth and digit auth") + } else if !models.IsErrTwoFactorNotEnrolled(err) { + ctx.ServerError("IsErrTwoFactorNotEnrolled", err) return } - - // Check if username or password is a token - isUsernameToken := len(authPasswd) == 0 || authPasswd == "x-oauth-basic" - // Assume username is token - authToken := authUsername - if !isUsernameToken { - // Assume password is token - authToken = authPasswd - } - uid := sso.CheckOAuthAccessToken(authToken) - if uid != 0 { - ctx.Data["IsApiToken"] = true - - authUser, err = models.GetUserByID(uid) - if err != nil { - ctx.ServerError("GetUserByID", err) - return - } - } - // Assume password is a token. - token, err := models.GetAccessTokenBySHA(authToken) - if err == nil { - authUser, err = models.GetUserByID(token.UID) - if err != nil { - ctx.ServerError("GetUserByID", err) - return - } - - token.UpdatedUnix = timeutil.TimeStampNow() - if err = models.UpdateAccessToken(token); err != nil { - ctx.ServerError("UpdateAccessToken", err) - } - } else if !models.IsErrAccessTokenNotExist(err) && !models.IsErrAccessTokenEmpty(err) { - log.Error("GetAccessTokenBySha: %v", err) - } - - if authUser == nil { - // Check username and password - authUser, err = models.UserSignIn(authUsername, authPasswd) - if err != nil { - if models.IsErrUserProhibitLogin(err) { - ctx.HandleText(http.StatusForbidden, "User is not permitted to login") - return - } else if !models.IsErrUserNotExist(err) { - ctx.ServerError("UserSignIn error: %v", err) - return - } - } - - if authUser == nil { - ctx.HandleText(http.StatusUnauthorized, fmt.Sprintf("invalid credentials from %s", ctx.RemoteAddr())) - return - } - - _, err = models.GetTwoFactorByUID(authUser.ID) - if err == nil { - // TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented - ctx.HandleText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. 
Please create and use a personal access token on the user settings page") - return - } else if !models.IsErrTwoFactorNotEnrolled(err) { - ctx.ServerError("IsErrTwoFactorNotEnrolled", err) - return - } - } } - if !authUser.IsActive || authUser.ProhibitLogin { + if !ctx.User.IsActive || ctx.User.ProhibitLogin { ctx.HandleText(http.StatusForbidden, "Your account is disabled.") return } if repoExist { - perm, err := models.GetUserRepoPermission(repo, authUser) + perm, err := models.GetUserRepoPermission(repo, ctx.User) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return @@ -293,14 +212,14 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { environ = []string{ models.EnvRepoUsername + "=" + username, models.EnvRepoName + "=" + reponame, - models.EnvPusherName + "=" + authUser.Name, - models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), + models.EnvPusherName + "=" + ctx.User.Name, + models.EnvPusherID + fmt.Sprintf("=%d", ctx.User.ID), models.EnvIsDeployKey + "=false", models.EnvAppURL + "=" + setting.AppURL, } - if !authUser.KeepEmailPrivate { - environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) + if !ctx.User.KeepEmailPrivate { + environ = append(environ, models.EnvPusherEmail+"="+ctx.User.Email) } if isWiki { @@ -336,7 +255,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { return } - repo, err = repo_service.PushCreateRepo(authUser, owner, reponame) + repo, err = repo_service.PushCreateRepo(ctx.User, owner, reponame) if err != nil { log.Error("pushCreateRepo: %v", err) ctx.Status(http.StatusNotFound) @@ -447,7 +366,26 @@ func (h *serviceHandler) setHeaderCacheForever() { h.w.Header().Set("Cache-Control", "public, max-age=31536000") } +func containsParentDirectorySeparator(v string) bool { + if !strings.Contains(v, "..") { + return false + } + for _, ent := range strings.FieldsFunc(v, isSlashRune) { + if ent == ".." { + return true + } + } + return false +} + +func isSlashRune(r rune) bool { return r == '/' || r == '\\' } + func (h *serviceHandler) sendFile(contentType, file string) { + if containsParentDirectorySeparator(file) { + log.Error("request file path contains invalid path: %v", file) + h.w.WriteHeader(http.StatusBadRequest) + return + } reqFile := path.Join(h.dir, file) fi, err := os.Stat(reqFile) diff --git a/routers/web/repo/http_test.go b/routers/web/repo/http_test.go new file mode 100644 index 000000000000..58ac1c07a129 --- /dev/null +++ b/routers/web/repo/http_test.go @@ -0,0 +1,43 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package repo + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestContainsParentDirectorySeparator(t *testing.T) { + tests := []struct { + v string + b bool + }{ + { + v: `user2/repo1/info/refs`, + b: false, + }, + { + v: `user2/repo1/HEAD`, + b: false, + }, + { + v: `user2/repo1/some.../strange_file...mp3`, + b: false, + }, + { + v: `user2/repo1/../../custom/conf/app.ini`, + b: true, + }, + { + v: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`, + b: true, + }, + } + + for i := range tests { + assert.EqualValues(t, tests[i].b, containsParentDirectorySeparator(tests[i].v)) + } +} diff --git a/routers/repo/issue.go b/routers/web/repo/issue.go similarity index 99% rename from routers/repo/issue.go rename to routers/web/repo/issue.go index 12726cd22c9a..9639ea82014a 100644 --- a/routers/repo/issue.go +++ b/routers/web/repo/issue.go @@ -372,6 +372,9 @@ func Issues(ctx *context.Context) { } issues(ctx, ctx.QueryInt64("milestone"), ctx.QueryInt64("project"), util.OptionalBoolOf(isPullList)) + if ctx.Written() { + return + } var err error // Get milestones @@ -828,7 +831,7 @@ func NewIssueChooseTemplate(ctx *context.Context) { ctx.HTML(http.StatusOK, tplIssueChoose) } -// ValidateRepoMetas check and returns repository's meta informations +// ValidateRepoMetas check and returns repository's meta information func ValidateRepoMetas(ctx *context.Context, form forms.CreateIssueForm, isPull bool) ([]int64, []int64, int64, int64) { var ( repo = ctx.Repo.Repository @@ -1134,6 +1137,7 @@ func ViewIssue(ctx *context.Context) { issue.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, issue.Content) if err != nil { ctx.ServerError("RenderString", err) @@ -1298,6 +1302,7 @@ func ViewIssue(ctx *context.Context) { comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, comment.Content) if err != nil { ctx.ServerError("RenderString", err) @@ -1373,6 +1378,7 @@ func ViewIssue(ctx *context.Context) { comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, comment.Content) if err != nil { ctx.ServerError("RenderString", err) @@ -1455,7 +1461,7 @@ func ViewIssue(ctx *context.Context) { } if perm.CanWrite(models.UnitTypeCode) { // Check if branch is not protected - if protected, err := pull.HeadRepo.IsProtectedBranch(pull.HeadBranch, ctx.User); err != nil { + if protected, err := pull.HeadRepo.IsProtectedBranch(pull.HeadBranch); err != nil { log.Error("IsProtectedBranch: %v", err) } else if !protected { canDelete = true @@ -1731,6 +1737,7 @@ func UpdateIssueContent(ctx *context.Context) { content, err := markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Query("context"), Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, issue.Content) if err != nil { ctx.ServerError("RenderString", err) @@ -2158,6 +2165,7 @@ func UpdateCommentContent(ctx *context.Context) { content, err := markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Query("context"), Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, comment.Content) if err != nil { ctx.ServerError("RenderString", err) diff --git a/routers/repo/issue_dependency.go b/routers/web/repo/issue_dependency.go 
similarity index 100% rename from routers/repo/issue_dependency.go rename to routers/web/repo/issue_dependency.go diff --git a/routers/repo/issue_label.go b/routers/web/repo/issue_label.go similarity index 100% rename from routers/repo/issue_label.go rename to routers/web/repo/issue_label.go diff --git a/routers/repo/issue_label_test.go b/routers/web/repo/issue_label_test.go similarity index 100% rename from routers/repo/issue_label_test.go rename to routers/web/repo/issue_label_test.go diff --git a/routers/repo/issue_lock.go b/routers/web/repo/issue_lock.go similarity index 100% rename from routers/repo/issue_lock.go rename to routers/web/repo/issue_lock.go diff --git a/routers/repo/issue_stopwatch.go b/routers/web/repo/issue_stopwatch.go similarity index 100% rename from routers/repo/issue_stopwatch.go rename to routers/web/repo/issue_stopwatch.go diff --git a/routers/repo/issue_test.go b/routers/web/repo/issue_test.go similarity index 100% rename from routers/repo/issue_test.go rename to routers/web/repo/issue_test.go diff --git a/routers/repo/issue_timetrack.go b/routers/web/repo/issue_timetrack.go similarity index 100% rename from routers/repo/issue_timetrack.go rename to routers/web/repo/issue_timetrack.go diff --git a/routers/repo/issue_watch.go b/routers/web/repo/issue_watch.go similarity index 100% rename from routers/repo/issue_watch.go rename to routers/web/repo/issue_watch.go diff --git a/routers/repo/lfs.go b/routers/web/repo/lfs.go similarity index 97% rename from routers/repo/lfs.go rename to routers/web/repo/lfs.go index 3a7ce2e23bd5..173ffb773f88 100644 --- a/routers/repo/lfs.go +++ b/routers/web/repo/lfs.go @@ -25,6 +25,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/typesniffer" ) const ( @@ -278,16 +279,16 @@ func LFSFileGet(ctx *context.Context) { } buf = buf[:n] - ctx.Data["IsTextFile"] = base.IsTextFile(buf) - isRepresentableAsText := base.IsRepresentableAsText(buf) + st := typesniffer.DetectContentType(buf) + ctx.Data["IsTextFile"] = st.IsText() + isRepresentableAsText := st.IsRepresentableAsText() fileSize := meta.Size ctx.Data["FileSize"] = meta.Size ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s.git/info/lfs/objects/%s/%s", setting.AppURL, ctx.Repo.Repository.FullName(), meta.Oid, "direct") switch { case isRepresentableAsText: - // This will be true for SVGs. 
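The lfs.go hunk below, like editor.go earlier in this diff, replaces the base.Is*File helpers with modules/typesniffer: sniff a small prefix of the blob, then branch on predicates of the detected type. A rough stand-alone sketch using only the methods visible in this diff:

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/typesniffer"
)

// describe mirrors the switch in LFSFileGet: pick a rendering strategy
// from the sniffed content type of the first bytes.
func describe(buf []byte) string {
	st := typesniffer.DetectContentType(buf)
	switch {
	case st.IsRepresentableAsText():
		return "text"
	case st.IsPDF():
		return "pdf"
	case st.IsVideo():
		return "video"
	case st.IsAudio():
		return "audio"
	case st.IsImage():
		return "image"
	default:
		return "binary"
	}
}

func main() {
	fmt.Println(describe([]byte("package main\n"))) // text
}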
- if base.IsImageFile(buf) { + if st.IsSvgImage() { ctx.Data["IsImageFile"] = true } @@ -322,13 +323,13 @@ func LFSFileGet(ctx *context.Context) { } ctx.Data["LineNums"] = gotemplate.HTML(output.String()) - case base.IsPDFFile(buf): + case st.IsPDF(): ctx.Data["IsPDFFile"] = true - case base.IsVideoFile(buf): + case st.IsVideo(): ctx.Data["IsVideoFile"] = true - case base.IsAudioFile(buf): + case st.IsAudio(): ctx.Data["IsAudioFile"] = true - case base.IsImageFile(buf): + case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()): ctx.Data["IsImageFile"] = true } ctx.HTML(http.StatusOK, tplSettingsLFSFile) @@ -414,7 +415,7 @@ func LFSPointerFiles(ctx *context.Context) { err = func() error { pointerChan := make(chan lfs.PointerBlob) errChan := make(chan error, 1) - go lfs.SearchPointerBlobs(ctx.Req.Context(), ctx.Repo.GitRepo, pointerChan, errChan) + go lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan, errChan) numPointers := 0 var numAssociated, numNoExist, numAssociatable int diff --git a/routers/repo/main_test.go b/routers/web/repo/main_test.go similarity index 84% rename from routers/repo/main_test.go rename to routers/web/repo/main_test.go index 04bbeeb21178..47f266365fd7 100644 --- a/routers/repo/main_test.go +++ b/routers/web/repo/main_test.go @@ -12,5 +12,5 @@ import ( ) func TestMain(m *testing.M) { - models.MainTest(m, filepath.Join("..", "..")) + models.MainTest(m, filepath.Join("..", "..", "..")) } diff --git a/routers/repo/middlewares.go b/routers/web/repo/middlewares.go similarity index 100% rename from routers/repo/middlewares.go rename to routers/web/repo/middlewares.go diff --git a/routers/repo/migrate.go b/routers/web/repo/migrate.go similarity index 99% rename from routers/repo/migrate.go rename to routers/web/repo/migrate.go index 24d4ef4099bd..521a856dae44 100644 --- a/routers/repo/migrate.go +++ b/routers/web/repo/migrate.go @@ -101,7 +101,7 @@ func handleMigrateError(ctx *context.Context, owner *models.User, err error, nam ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(models.ErrNamePatternNotAllowed).Pattern), tpl, form) default: remoteAddr, _ := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, form.AuthPassword) - err = util.URLSanitizedError(err, remoteAddr) + err = util.NewStringURLSanitizedError(err, remoteAddr, true) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "Bad credentials") || strings.Contains(err.Error(), "could not read Username") { diff --git a/routers/repo/milestone.go b/routers/web/repo/milestone.go similarity index 99% rename from routers/repo/milestone.go rename to routers/web/repo/milestone.go index bb6b310cbe8d..4cdca38dd02b 100644 --- a/routers/repo/milestone.go +++ b/routers/web/repo/milestone.go @@ -88,6 +88,7 @@ func Milestones(ctx *context.Context) { m.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, m.Content) if err != nil { ctx.ServerError("RenderString", err) @@ -280,6 +281,7 @@ func MilestoneIssuesAndPulls(ctx *context.Context) { milestone.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, milestone.Content) if err != nil { ctx.ServerError("RenderString", err) diff --git a/routers/repo/projects.go b/routers/web/repo/projects.go similarity index 99% rename from routers/repo/projects.go rename to 
routers/web/repo/projects.go index eb0719995cb5..c7490893d5fe 100644 --- a/routers/repo/projects.go +++ b/routers/web/repo/projects.go @@ -81,6 +81,7 @@ func Projects(ctx *context.Context) { projects[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, projects[i].Description) if err != nil { ctx.ServerError("RenderString", err) @@ -322,6 +323,7 @@ func ViewProject(ctx *context.Context) { project.RenderedContent, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, project.Description) if err != nil { ctx.ServerError("RenderString", err) diff --git a/routers/repo/projects_test.go b/routers/web/repo/projects_test.go similarity index 100% rename from routers/repo/projects_test.go rename to routers/web/repo/projects_test.go diff --git a/routers/repo/pull.go b/routers/web/repo/pull.go similarity index 93% rename from routers/repo/pull.go rename to routers/web/repo/pull.go index e13ad0cc033e..a29979964777 100644 --- a/routers/repo/pull.go +++ b/routers/web/repo/pull.go @@ -9,6 +9,7 @@ package repo import ( "container/list" "crypto/subtle" + "errors" "fmt" "net/http" "path" @@ -21,7 +22,6 @@ import ( "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" - repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/upload" @@ -519,6 +519,8 @@ func PrepareViewPullInfo(ctx *context.Context, issue *models.Issue) *git.Compare return nil } + ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes + if pull.IsWorkInProgress() { ctx.Data["IsPullWorkInProgress"] = true ctx.Data["WorkInProgressPrefix"] = pull.GetWorkInProgressPrefix() @@ -692,6 +694,10 @@ func ViewPullFiles(ctx *context.Context) { getBranchData(ctx, issue) ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.User.ID) ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) + + ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled + upload.AddUploadContext(ctx, "comment") + ctx.HTML(http.StatusOK, tplPullFiles) } @@ -959,6 +965,22 @@ func MergePullRequest(ctx *context.Context) { } log.Trace("Pull request merged: %d", pr.ID) + + if form.DeleteBranchAfterMerge { + var headRepo *git.Repository + if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil { + headRepo = ctx.Repo.GitRepo + } else { + headRepo, err = git.OpenRepository(pr.HeadRepo.RepoPath()) + if err != nil { + ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) + return + } + defer headRepo.Close() + } + deleteBranch(ctx, pr, headRepo) + } + ctx.Redirect(ctx.Repo.RepoLink + "/pulls/" + fmt.Sprint(pr.Index)) } @@ -1164,19 +1186,35 @@ func CleanUpPullRequest(ctx *context.Context) { fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch - gitRepo, err := git.OpenRepository(pr.HeadRepo.RepoPath()) - if err != nil { - ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) - return + var gitBaseRepo *git.Repository + + // Assume that the base repo is the current context (almost certainly) + if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.BaseRepoID && 
ctx.Repo.GitRepo != nil { + gitBaseRepo = ctx.Repo.GitRepo + } else { + // If not just open it + gitBaseRepo, err = git.OpenRepository(pr.BaseRepo.RepoPath()) + if err != nil { + ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.RepoPath()), err) + return + } + defer gitBaseRepo.Close() } - defer gitRepo.Close() - gitBaseRepo, err := git.OpenRepository(pr.BaseRepo.RepoPath()) - if err != nil { - ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.RepoPath()), err) - return + // Now assume that the head repo is the same as the base repo (reasonable chance) + gitRepo := gitBaseRepo + // But if not: is it the same as the context? + if pr.BaseRepoID != pr.HeadRepoID && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil { + gitRepo = ctx.Repo.GitRepo + } else if pr.BaseRepoID != pr.HeadRepoID { + // Otherwise just load it up + gitRepo, err = git.OpenRepository(pr.HeadRepo.RepoPath()) + if err != nil { + ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) + return + } + defer gitRepo.Close() } - defer gitBaseRepo.Close() defer func() { ctx.JSON(http.StatusOK, map[string]interface{}{ @@ -1184,20 +1222,6 @@ func CleanUpPullRequest(ctx *context.Context) { }) }() - if pr.HeadBranch == pr.HeadRepo.DefaultBranch || !gitRepo.IsBranchExist(pr.HeadBranch) { - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) - return - } - - // Check if branch is not protected - if protected, err := pr.HeadRepo.IsProtectedBranch(pr.HeadBranch, ctx.User); err != nil || protected { - if err != nil { - log.Error("HeadRepo.IsProtectedBranch: %v", err) - } - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) - return - } - // Check if branch has no new commits headCommitID, err := gitBaseRepo.GetRefCommitID(pr.GetGitRefName()) if err != nil { @@ -1216,28 +1240,27 @@ func CleanUpPullRequest(ctx *context.Context) { return } - if err := gitRepo.DeleteBranch(pr.HeadBranch, git.DeleteBranchOptions{ - Force: true, - }); err != nil { - log.Error("DeleteBranch: %v", err) - ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) - return - } + deleteBranch(ctx, pr, gitRepo) +} - if err := repo_service.PushUpdate( - &repo_module.PushUpdateOptions{ - RefFullName: git.BranchPrefix + pr.HeadBranch, - OldCommitID: branchCommitID, - NewCommitID: git.EmptySHA, - PusherID: ctx.User.ID, - PusherName: ctx.User.Name, - RepoUserName: pr.HeadRepo.Owner.Name, - RepoName: pr.HeadRepo.Name, - }); err != nil { - log.Error("Update: %v", err) +func deleteBranch(ctx *context.Context, pr *models.PullRequest, gitRepo *git.Repository) { + fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch + if err := repo_service.DeleteBranch(ctx.User, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil { + switch { + case git.IsErrBranchNotExist(err): + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) + case errors.Is(err, repo_service.ErrBranchIsDefault): + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) + case errors.Is(err, repo_service.ErrBranchIsProtected): + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) + default: + log.Error("DeleteBranch: %v", err) + ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) + } + return } - if err := models.AddDeletePRBranchComment(ctx.User, pr.BaseRepo, issue.ID, pr.HeadBranch); err != nil { + if err := models.AddDeletePRBranchComment(ctx.User, pr.BaseRepo, pr.IssueID, 
pr.HeadBranch); err != nil { // Do not fail here as branch has already been deleted log.Error("DeleteBranch: %v", err) } diff --git a/routers/repo/pull_review.go b/routers/web/repo/pull_review.go similarity index 97% rename from routers/repo/pull_review.go rename to routers/web/repo/pull_review.go index 9e505c3db373..36eee3f377b8 100644 --- a/routers/repo/pull_review.go +++ b/routers/web/repo/pull_review.go @@ -12,6 +12,7 @@ import ( "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" pull_service "code.gitea.io/gitea/services/pull" @@ -211,7 +212,12 @@ func SubmitReview(ctx *context.Context) { } } - _, comm, err := pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID) + var attachments []string + if setting.Attachment.Enabled { + attachments = form.Files + } + + _, comm, err := pull_service.SubmitReview(ctx.User, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID, attachments) if err != nil { if models.IsContentEmptyErr(err) { ctx.Flash.Error(ctx.Tr("repo.issues.review.content.empty")) diff --git a/routers/repo/release.go b/routers/web/repo/release.go similarity index 91% rename from routers/repo/release.go rename to routers/web/repo/release.go index abce3e9ac1a2..0665496d44c3 100644 --- a/routers/repo/release.go +++ b/routers/web/repo/release.go @@ -70,6 +70,11 @@ func TagsList(ctx *context.Context) { func releasesOrTags(ctx *context.Context, isTagList bool) { ctx.Data["PageIsReleaseList"] = true ctx.Data["DefaultBranch"] = ctx.Repo.Repository.DefaultBranch + ctx.Data["IsViewBranch"] = false + ctx.Data["IsViewTag"] = true + // Disable the showCreateNewBranch form in the dropdown on this page. 
+ ctx.Data["CanCreateBranch"] = false + ctx.Data["HideBranchesInDropdown"] = true if isTagList { ctx.Data["Title"] = ctx.Tr("repo.release.tags") @@ -79,6 +84,13 @@ func releasesOrTags(ctx *context.Context, isTagList bool) { ctx.Data["PageIsTagList"] = false } + tags, err := ctx.Repo.GitRepo.GetTags() + if err != nil { + ctx.ServerError("GetTags", err) + return + } + ctx.Data["Tags"] = tags + writeAccess := ctx.Repo.CanWrite(models.UnitTypeReleases) ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived @@ -87,7 +99,7 @@ func releasesOrTags(ctx *context.Context, isTagList bool) { Page: ctx.QueryInt("page"), PageSize: convert.ToCorrectPageSize(ctx.QueryInt("limit")), }, - IncludeDrafts: writeAccess, + IncludeDrafts: writeAccess && !isTagList, IncludeTags: isTagList, } @@ -129,18 +141,25 @@ func releasesOrTags(ctx *context.Context, isTagList bool) { } cacheUsers[r.PublisherID] = r.Publisher } - if err := calReleaseNumCommitsBehind(ctx.Repo, r, countCache); err != nil { - ctx.ServerError("calReleaseNumCommitsBehind", err) - return - } + r.Note, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, r.Note) if err != nil { ctx.ServerError("RenderString", err) return } + + if r.IsDraft { + continue + } + + if err := calReleaseNumCommitsBehind(ctx.Repo, r, countCache); err != nil { + ctx.ServerError("calReleaseNumCommitsBehind", err) + return + } } ctx.Data["Releases"] = releases @@ -186,13 +205,16 @@ func SingleRelease(ctx *context.Context) { return } } - if err := calReleaseNumCommitsBehind(ctx.Repo, release, make(map[string]int64)); err != nil { - ctx.ServerError("calReleaseNumCommitsBehind", err) - return + if !release.IsDraft { + if err := calReleaseNumCommitsBehind(ctx.Repo, release, make(map[string]int64)); err != nil { + ctx.ServerError("calReleaseNumCommitsBehind", err) + return + } } release.Note, err = markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: ctx.Repo.Repository.ComposeMetas(), + GitRepo: ctx.Repo.GitRepo, }, release.Note) if err != nil { ctx.ServerError("RenderString", err) @@ -300,6 +322,18 @@ func NewReleasePost(ctx *context.Context) { return } + if models.IsErrInvalidTagName(err) { + ctx.Flash.Error(ctx.Tr("repo.release.tag_name_invalid")) + ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()) + return + } + + if models.IsErrProtectedTagName(err) { + ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected")) + ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()) + return + } + ctx.ServerError("releaseservice.CreateNewTag", err) return } @@ -311,7 +345,9 @@ func NewReleasePost(ctx *context.Context) { rel = &models.Release{ RepoID: ctx.Repo.Repository.ID, + Repo: ctx.Repo.Repository, PublisherID: ctx.User.ID, + Publisher: ctx.User, Title: form.Title, TagName: form.TagName, Target: form.Target, @@ -328,6 +364,8 @@ func NewReleasePost(ctx *context.Context) { ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_already_exist"), tplReleaseNew, &form) case models.IsErrInvalidTagName(err): ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_invalid"), tplReleaseNew, &form) + case models.IsErrProtectedTagName(err): + ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_protected"), tplReleaseNew, &form) default: ctx.ServerError("CreateRelease", err) } diff --git a/routers/repo/release_test.go b/routers/web/repo/release_test.go similarity index 100% rename from routers/repo/release_test.go rename to 
routers/web/repo/release_test.go diff --git a/routers/repo/repo.go b/routers/web/repo/repo.go similarity index 80% rename from routers/repo/repo.go rename to routers/web/repo/repo.go index 69471a83d398..919fd4620d25 100644 --- a/routers/repo/repo.go +++ b/routers/web/repo/repo.go @@ -15,8 +15,10 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/web" archiver_service "code.gitea.io/gitea/services/archiver" "code.gitea.io/gitea/services/forms" @@ -367,25 +369,85 @@ func RedirectDownload(ctx *context.Context) { // Download an archive of a repository func Download(ctx *context.Context) { uri := ctx.Params("*") - aReq := archiver_service.DeriveRequestFrom(ctx, uri) - + aReq, err := archiver_service.NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri) + if err != nil { + ctx.ServerError("archiver_service.NewRequest", err) + return + } if aReq == nil { ctx.Error(http.StatusNotFound) return } - downloadName := ctx.Repo.Repository.Name + "-" + aReq.GetArchiveName() - complete := aReq.IsComplete() - if !complete { - aReq = archiver_service.ArchiveRepository(aReq) - complete = aReq.WaitForCompletion(ctx) + archiver, err := models.GetRepoArchiver(models.DefaultDBContext(), aReq.RepoID, aReq.Type, aReq.CommitID) + if err != nil { + ctx.ServerError("models.GetRepoArchiver", err) + return + } + if archiver != nil && archiver.Status == models.RepoArchiverReady { + download(ctx, aReq.GetArchiveName(), archiver) + return } - if complete { - ctx.ServeFile(aReq.GetArchivePath(), downloadName) - } else { - ctx.Error(http.StatusNotFound) + if err := archiver_service.StartArchive(aReq); err != nil { + ctx.ServerError("archiver_service.StartArchive", err) + return + } + + var times int + var t = time.NewTicker(time.Second * 1) + defer t.Stop() + + for { + select { + case <-graceful.GetManager().HammerContext().Done(): + log.Warn("exit archive download because system stop") + return + case <-t.C: + if times > 20 { + ctx.ServerError("wait download timeout", nil) + return + } + times++ + archiver, err = models.GetRepoArchiver(models.DefaultDBContext(), aReq.RepoID, aReq.Type, aReq.CommitID) + if err != nil { + ctx.ServerError("archiver_service.StartArchive", err) + return + } + if archiver != nil && archiver.Status == models.RepoArchiverReady { + download(ctx, aReq.GetArchiveName(), archiver) + return + } + } + } +} + +func download(ctx *context.Context, archiveName string, archiver *models.RepoArchiver) { + downloadName := ctx.Repo.Repository.Name + "-" + archiveName + + rPath, err := archiver.RelativePath() + if err != nil { + ctx.ServerError("archiver.RelativePath", err) + return + } + + if setting.RepoArchive.ServeDirect { + //If we have a signed url (S3, object storage), redirect to this directly. + u, err := storage.RepoArchives.URL(rPath, downloadName) + if u != nil && err == nil { + ctx.Redirect(u.String()) + return + } + } + + //If we have matched and access to release or issue + fr, err := storage.RepoArchives.Open(rPath) + if err != nil { + ctx.ServerError("Open", err) + return } + defer fr.Close() + ctx.ServeStream(fr, downloadName) } // InitiateDownload will enqueue an archival request, as needed. It may submit @@ -393,20 +455,34 @@ func Download(ctx *context.Context) { // kind of drop it on the floor if this is the case. 
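
[Editor's note] The rewritten Download handler above polls models.GetRepoArchiver once per second with a time.Ticker, aborts early when the graceful-shutdown HammerContext is done, gives up after roughly 20 attempts ("wait download timeout"), and serves the archive from storage.RepoArchives (optionally redirecting to a signed URL when ServeDirect is enabled) once the status is RepoArchiverReady. Below is a minimal standalone sketch of that poll-with-deadline pattern; checkReady and the timings are illustrative placeholders, not Gitea APIs.

```go
// Sketch of the poll-until-ready pattern used by the new Download handler.
// checkReady stands in for models.GetRepoArchiver; serving the result is omitted.
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

var errTimeout = errors.New("wait download timeout")

// waitForArchive ticks once per second, stops on context cancellation
// (the graceful-shutdown case), and gives up after 20 attempts.
func waitForArchive(ctx context.Context, checkReady func() (bool, error)) error {
	t := time.NewTicker(time.Second)
	defer t.Stop()

	for tries := 0; tries < 20; tries++ {
		select {
		case <-ctx.Done():
			// Mirrors the HammerContext check: stop waiting when the system shuts down.
			return ctx.Err()
		case <-t.C:
			ready, err := checkReady()
			if err != nil {
				return err
			}
			if ready {
				return nil
			}
		}
	}
	return errTimeout
}

func main() {
	start := time.Now()
	err := waitForArchive(context.Background(), func() (bool, error) {
		// Pretend the archive becomes ready after about 3 seconds.
		return time.Since(start) > 3*time.Second, nil
	})
	fmt.Println("done:", err)
}
```
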
func InitiateDownload(ctx *context.Context) { uri := ctx.Params("*") - aReq := archiver_service.DeriveRequestFrom(ctx, uri) - + aReq, err := archiver_service.NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri) + if err != nil { + ctx.ServerError("archiver_service.NewRequest", err) + return + } if aReq == nil { ctx.Error(http.StatusNotFound) return } - complete := aReq.IsComplete() - if !complete { - aReq = archiver_service.ArchiveRepository(aReq) - complete, _ = aReq.TimedWaitForCompletion(ctx, 2*time.Second) + archiver, err := models.GetRepoArchiver(models.DefaultDBContext(), aReq.RepoID, aReq.Type, aReq.CommitID) + if err != nil { + ctx.ServerError("archiver_service.StartArchive", err) + return + } + if archiver == nil || archiver.Status != models.RepoArchiverReady { + if err := archiver_service.StartArchive(aReq); err != nil { + ctx.ServerError("archiver_service.StartArchive", err) + return + } + } + + var completed bool + if archiver != nil && archiver.Status == models.RepoArchiverReady { + completed = true } ctx.JSON(http.StatusOK, map[string]interface{}{ - "complete": complete, + "complete": completed, }) } diff --git a/routers/repo/search.go b/routers/web/repo/search.go similarity index 100% rename from routers/repo/search.go rename to routers/web/repo/search.go diff --git a/routers/repo/setting.go b/routers/web/repo/setting.go similarity index 88% rename from routers/repo/setting.go rename to routers/web/repo/setting.go index b37ac03112cf..0a84f15bf0bf 100644 --- a/routers/repo/setting.go +++ b/routers/web/repo/setting.go @@ -10,6 +10,7 @@ import ( "fmt" "io/ioutil" "net/http" + "strconv" "strings" "time" @@ -24,6 +25,8 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/typesniffer" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/validation" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/utils" @@ -37,6 +40,7 @@ const ( tplSettingsOptions base.TplName = "repo/settings/options" tplCollaboration base.TplName = "repo/settings/collaboration" tplBranches base.TplName = "repo/settings/branches" + tplTags base.TplName = "repo/settings/tags" tplGithooks base.TplName = "repo/settings/githooks" tplGithookEdit base.TplName = "repo/settings/githook_edit" tplDeployKeys base.TplName = "repo/settings/deploy_keys" @@ -48,6 +52,8 @@ func Settings(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.settings") ctx.Data["PageIsSettingsOptions"] = true ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate + ctx.Data["DisabledMirrors"] = setting.Repository.DisableMirrors + ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval signing, _ := models.SigningKey(ctx.Repo.Repository.RepoPath()) ctx.Data["SigningKeyAvailable"] = len(signing) > 0 @@ -166,6 +172,11 @@ func SettingsPost(ctx *context.Context) { } } + u, _ := git.GetRemoteAddress(ctx.Repo.Repository.RepoPath(), ctx.Repo.Mirror.GetRemoteName()) + if u.User != nil && form.MirrorPassword == "" && form.MirrorUsername == u.User.Username() { + form.MirrorPassword, _ = u.User.Password() + } + address, err := forms.ParseRemoteAddr(form.MirrorAddress, form.MirrorUsername, form.MirrorPassword) if err == nil { err = migrations.IsMigrateURLAllowed(address, ctx.User) @@ -219,6 +230,92 @@ func SettingsPost(ctx *context.Context) { ctx.Flash.Info(ctx.Tr("repo.settings.mirror_sync_in_progress")) ctx.Redirect(repo.Link() + "/settings") + case "push-mirror-sync": + m, err := 
selectPushMirrorByForm(form, repo) + if err != nil { + ctx.NotFound("", nil) + return + } + + mirror_service.AddPushMirrorToQueue(m.ID) + + ctx.Flash.Info(ctx.Tr("repo.settings.mirror_sync_in_progress")) + ctx.Redirect(repo.Link() + "/settings") + + case "push-mirror-remove": + // This section doesn't require repo_name/RepoName to be set in the form, don't show it + // as an error on the UI for this action + ctx.Data["Err_RepoName"] = nil + + m, err := selectPushMirrorByForm(form, repo) + if err != nil { + ctx.NotFound("", nil) + return + } + + if err = mirror_service.RemovePushMirrorRemote(m); err != nil { + ctx.ServerError("RemovePushMirrorRemote", err) + return + } + + if err = models.DeletePushMirrorByID(m.ID); err != nil { + ctx.ServerError("DeletePushMirrorByID", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success")) + ctx.Redirect(repo.Link() + "/settings") + + case "push-mirror-add": + // This section doesn't require repo_name/RepoName to be set in the form, don't show it + // as an error on the UI for this action + ctx.Data["Err_RepoName"] = nil + + interval, err := time.ParseDuration(form.PushMirrorInterval) + if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) { + ctx.Data["Err_PushMirrorInterval"] = true + ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form) + return + } + + address, err := forms.ParseRemoteAddr(form.PushMirrorAddress, form.PushMirrorUsername, form.PushMirrorPassword) + if err == nil { + err = migrations.IsMigrateURLAllowed(address, ctx.User) + } + if err != nil { + ctx.Data["Err_PushMirrorAddress"] = true + handleSettingRemoteAddrError(ctx, err, form) + return + } + + remoteSuffix, err := util.RandomString(10) + if err != nil { + ctx.ServerError("RandomString", err) + return + } + + m := &models.PushMirror{ + RepoID: repo.ID, + Repo: repo, + RemoteName: fmt.Sprintf("remote_mirror_%s", remoteSuffix), + Interval: interval, + } + if err := models.InsertPushMirror(m); err != nil { + ctx.ServerError("InsertPushMirror", err) + return + } + + if err := mirror_service.AddPushMirrorRemote(m, address); err != nil { + if err := models.DeletePushMirrorByID(m.ID); err != nil { + log.Error("DeletePushMirrorByID %v", err) + } + ctx.ServerError("AddPushMirrorRemote", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success")) + ctx.Redirect(repo.Link() + "/settings") + case "advanced": var repoChanged bool var units []models.RepoUnit @@ -319,14 +416,15 @@ func SettingsPost(ctx *context.Context) { RepoID: repo.ID, Type: models.UnitTypePullRequests, Config: &models.PullRequestsConfig{ - IgnoreWhitespaceConflicts: form.PullsIgnoreWhitespace, - AllowMerge: form.PullsAllowMerge, - AllowRebase: form.PullsAllowRebase, - AllowRebaseMerge: form.PullsAllowRebaseMerge, - AllowSquash: form.PullsAllowSquash, - AllowManualMerge: form.PullsAllowManualMerge, - AutodetectManualMerge: form.EnableAutodetectManualMerge, - DefaultMergeStyle: models.MergeStyle(form.PullsDefaultMergeStyle), + IgnoreWhitespaceConflicts: form.PullsIgnoreWhitespace, + AllowMerge: form.PullsAllowMerge, + AllowRebase: form.PullsAllowRebase, + AllowRebaseMerge: form.PullsAllowRebaseMerge, + AllowSquash: form.PullsAllowSquash, + AllowManualMerge: form.PullsAllowManualMerge, + AutodetectManualMerge: form.EnableAutodetectManualMerge, + DefaultDeleteBranchAfterMerge: form.DefaultDeleteBranchAfterMerge, + DefaultMergeStyle: models.MergeStyle(form.PullsDefaultMergeStyle), }, }) } else if 
!models.UnitTypePullRequests.UnitGlobalDisabled() { @@ -544,6 +642,11 @@ func SettingsPost(ctx *context.Context) { return } + // Close the gitrepository before doing this. + if ctx.Repo.GitRepo != nil { + ctx.Repo.GitRepo.Close() + } + if err := repo_service.DeleteRepository(ctx.User, ctx.Repo.Repository); err != nil { ctx.ServerError("DeleteRepository", err) return @@ -1010,7 +1113,8 @@ func UpdateAvatarSetting(ctx *context.Context, form forms.AvatarForm) error { if err != nil { return fmt.Errorf("ioutil.ReadAll: %v", err) } - if !base.IsImageFile(data) { + st := typesniffer.DetectContentType(data) + if !(st.IsImage() && !st.IsSvgImage()) { return errors.New(ctx.Tr("settings.uploaded_avatar_not_a_image")) } if err = ctxRepo.UploadAvatar(data); err != nil { @@ -1038,3 +1142,22 @@ func SettingsDeleteAvatar(ctx *context.Context) { } ctx.Redirect(ctx.Repo.RepoLink + "/settings") } + +func selectPushMirrorByForm(form *forms.RepoSettingForm, repo *models.Repository) (*models.PushMirror, error) { + id, err := strconv.ParseInt(form.PushMirrorID, 10, 64) + if err != nil { + return nil, err + } + + if err = repo.LoadPushMirrors(); err != nil { + return nil, err + } + + for _, m := range repo.PushMirrors { + if m.ID == id { + return m, nil + } + } + + return nil, fmt.Errorf("PushMirror[%v] not associated to repository %v", id, repo) +} diff --git a/routers/repo/setting_protected_branch.go b/routers/web/repo/setting_protected_branch.go similarity index 100% rename from routers/repo/setting_protected_branch.go rename to routers/web/repo/setting_protected_branch.go diff --git a/routers/repo/settings_test.go b/routers/web/repo/settings_test.go similarity index 100% rename from routers/repo/settings_test.go rename to routers/web/repo/settings_test.go diff --git a/routers/web/repo/tag.go b/routers/web/repo/tag.go new file mode 100644 index 000000000000..7928591371b7 --- /dev/null +++ b/routers/web/repo/tag.go @@ -0,0 +1,182 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package repo + +import ( + "fmt" + "net/http" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/services/forms" +) + +// Tags render the page to protect tags +func Tags(ctx *context.Context) { + if setTagsContext(ctx) != nil { + return + } + + ctx.HTML(http.StatusOK, tplTags) +} + +// NewProtectedTagPost handles creation of a protect tag +func NewProtectedTagPost(ctx *context.Context) { + if setTagsContext(ctx) != nil { + return + } + + if ctx.HasError() { + ctx.HTML(http.StatusOK, tplTags) + return + } + + repo := ctx.Repo.Repository + form := web.GetForm(ctx).(*forms.ProtectTagForm) + + pt := &models.ProtectedTag{ + RepoID: repo.ID, + NamePattern: strings.TrimSpace(form.NamePattern), + } + + if strings.TrimSpace(form.AllowlistUsers) != "" { + pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ",")) + } + if strings.TrimSpace(form.AllowlistTeams) != "" { + pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ",")) + } + + if err := models.InsertProtectedTag(pt); err != nil { + ctx.ServerError("InsertProtectedTag", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success")) + ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path) +} + +// EditProtectedTag render the page to edit a protect tag +func EditProtectedTag(ctx *context.Context) { + if setTagsContext(ctx) != nil { + return + } + + ctx.Data["PageIsEditProtectedTag"] = true + + pt := selectProtectedTagByContext(ctx) + if pt == nil { + return + } + + ctx.Data["name_pattern"] = pt.NamePattern + ctx.Data["allowlist_users"] = strings.Join(base.Int64sToStrings(pt.AllowlistUserIDs), ",") + ctx.Data["allowlist_teams"] = strings.Join(base.Int64sToStrings(pt.AllowlistTeamIDs), ",") + + ctx.HTML(http.StatusOK, tplTags) +} + +// EditProtectedTagPost handles creation of a protect tag +func EditProtectedTagPost(ctx *context.Context) { + if setTagsContext(ctx) != nil { + return + } + + ctx.Data["PageIsEditProtectedTag"] = true + + if ctx.HasError() { + ctx.HTML(http.StatusOK, tplTags) + return + } + + pt := selectProtectedTagByContext(ctx) + if pt == nil { + return + } + + form := web.GetForm(ctx).(*forms.ProtectTagForm) + + pt.NamePattern = strings.TrimSpace(form.NamePattern) + pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ",")) + pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ",")) + + if err := models.UpdateProtectedTag(pt); err != nil { + ctx.ServerError("UpdateProtectedTag", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success")) + ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags") +} + +// DeleteProtectedTagPost handles deletion of a protected tag +func DeleteProtectedTagPost(ctx *context.Context) { + pt := selectProtectedTagByContext(ctx) + if pt == nil { + return + } + + if err := models.DeleteProtectedTag(pt); err != nil { + ctx.ServerError("DeleteProtectedTag", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success")) + ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags") +} + +func setTagsContext(ctx *context.Context) error { + ctx.Data["Title"] = ctx.Tr("repo.settings") + ctx.Data["PageIsSettingsTags"] = true + + protectedTags, err := ctx.Repo.Repository.GetProtectedTags() + if err != nil { + ctx.ServerError("GetProtectedTags", err) + 
return err + } + ctx.Data["ProtectedTags"] = protectedTags + + users, err := ctx.Repo.Repository.GetReaders() + if err != nil { + ctx.ServerError("Repo.Repository.GetReaders", err) + return err + } + ctx.Data["Users"] = users + + if ctx.Repo.Owner.IsOrganization() { + teams, err := ctx.Repo.Owner.TeamsWithAccessToRepo(ctx.Repo.Repository.ID, models.AccessModeRead) + if err != nil { + ctx.ServerError("Repo.Owner.TeamsWithAccessToRepo", err) + return err + } + ctx.Data["Teams"] = teams + } + + return nil +} + +func selectProtectedTagByContext(ctx *context.Context) *models.ProtectedTag { + id := ctx.QueryInt64("id") + if id == 0 { + id = ctx.ParamsInt64(":id") + } + + tag, err := models.GetProtectedTagByID(id) + if err != nil { + ctx.ServerError("GetProtectedTagByID", err) + return nil + } + + if tag != nil && tag.RepoID == ctx.Repo.Repository.ID { + return tag + } + + ctx.NotFound("", fmt.Errorf("ProtectedTag[%v] not associated to repository %v", id, ctx.Repo.Repository)) + + return nil +} diff --git a/routers/repo/topic.go b/routers/web/repo/topic.go similarity index 100% rename from routers/repo/topic.go rename to routers/web/repo/topic.go diff --git a/routers/repo/view.go b/routers/web/repo/view.go similarity index 96% rename from routers/repo/view.go rename to routers/web/repo/view.go index 285cacc2dfa9..21bd80c40667 100644 --- a/routers/repo/view.go +++ b/routers/web/repo/view.go @@ -29,6 +29,8 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/typesniffer" ) const ( @@ -145,7 +147,7 @@ func renderDirectory(ctx *context.Context, treeLink string) { } var latestCommit *git.Commit - ctx.Data["Files"], latestCommit, err = entries.GetCommitsInfo(ctx.Repo.Commit, ctx.Repo.TreePath, c) + ctx.Data["Files"], latestCommit, err = entries.GetCommitsInfo(ctx, ctx.Repo.Commit, ctx.Repo.TreePath, c) if err != nil { ctx.ServerError("GetCommitsInfo", err) return @@ -265,7 +267,9 @@ func renderDirectory(ctx *context.Context, treeLink string) { n, _ := dataRc.Read(buf) buf = buf[:n] - isTextFile := base.IsTextFile(buf) + st := typesniffer.DetectContentType(buf) + isTextFile := st.IsText() + ctx.Data["FileIsText"] = isTextFile ctx.Data["FileName"] = readmeFile.name fileSize := int64(0) @@ -302,7 +306,8 @@ func renderDirectory(ctx *context.Context, treeLink string) { } buf = buf[:n] - isTextFile = base.IsTextFile(buf) + st = typesniffer.DetectContentType(buf) + isTextFile = st.IsText() ctx.Data["IsTextFile"] = isTextFile fileSize = meta.Size @@ -331,9 +336,11 @@ func renderDirectory(ctx *context.Context, treeLink string) { ctx.Data["MarkupType"] = string(markupType) var result strings.Builder err := markup.Render(&markup.RenderContext{ + Ctx: ctx, Filename: readmeFile.name, URLPrefix: readmeTreelink, Metas: ctx.Repo.Repository.ComposeDocumentMetas(), + GitRepo: ctx.Repo.GitRepo, }, rd, &result) if err != nil { log.Error("Render failed: %v then fallback", err) @@ -405,7 +412,9 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st n, _ := dataRc.Read(buf) buf = buf[:n] - isTextFile := base.IsTextFile(buf) + st := typesniffer.DetectContentType(buf) + isTextFile := st.IsText() + isLFSFile := false isDisplayingSource := ctx.Query("display") == "source" isDisplayingRendered := !isDisplayingSource @@ -441,14 +450,16 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } buf = buf[:n] - isTextFile = 
base.IsTextFile(buf) + st = typesniffer.DetectContentType(buf) + isTextFile = st.IsText() + fileSize = meta.Size ctx.Data["RawFileLink"] = fmt.Sprintf("%s/media/%s/%s", ctx.Repo.RepoLink, ctx.Repo.BranchNameSubURL(), ctx.Repo.TreePath) } } } - isRepresentableAsText := base.IsRepresentableAsText(buf) + isRepresentableAsText := st.IsRepresentableAsText() if !isRepresentableAsText { // If we can't show plain text, always try to render. isDisplayingSource = false @@ -483,8 +494,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st switch { case isRepresentableAsText: - // This will be true for SVGs. - if base.IsImageFile(buf) { + if st.IsSvgImage() { ctx.Data["IsImageFile"] = true ctx.Data["HasSourceRenderedToggle"] = true } @@ -502,9 +512,11 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st ctx.Data["MarkupType"] = markupType var result strings.Builder err := markup.Render(&markup.RenderContext{ + Ctx: ctx, Filename: blob.Name(), URLPrefix: path.Dir(treeLink), Metas: ctx.Repo.Repository.ComposeDocumentMetas(), + GitRepo: ctx.Repo.GitRepo, }, rd, &result) if err != nil { ctx.ServerError("Render", err) @@ -540,13 +552,13 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } } - case base.IsPDFFile(buf): + case st.IsPDF(): ctx.Data["IsPDFFile"] = true - case base.IsVideoFile(buf): + case st.IsVideo(): ctx.Data["IsVideoFile"] = true - case base.IsAudioFile(buf): + case st.IsAudio(): ctx.Data["IsAudioFile"] = true - case base.IsImageFile(buf): + case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()): ctx.Data["IsImageFile"] = true default: if fileSize >= setting.UI.MaxDisplayFileSize { @@ -560,9 +572,11 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st ctx.Data["MarkupType"] = markupType var result strings.Builder err := markup.Render(&markup.RenderContext{ + Ctx: ctx, Filename: blob.Name(), URLPrefix: path.Dir(treeLink), Metas: ctx.Repo.Repository.ComposeDocumentMetas(), + GitRepo: ctx.Repo.GitRepo, }, rd, &result) if err != nil { ctx.ServerError("Render", err) @@ -614,6 +628,7 @@ func Home(ctx *context.Context) { ctx.Data["Repo"] = ctx.Repo ctx.Data["MigrateTask"] = task ctx.Data["CloneAddr"] = safeURL(cfg.CloneAddr) + ctx.Data["Failed"] = task.Status == structs.TaskStatusFailed ctx.HTML(http.StatusOK, tplMigrating) return } @@ -739,7 +754,7 @@ func renderCode(ctx *context.Context) { ctx.HTML(http.StatusOK, tplRepoHome) } -// RenderUserCards render a page show users according the input templaet +// RenderUserCards render a page show users according the input template func RenderUserCards(ctx *context.Context, total int, getter func(opts models.ListOptions) ([]*models.User, error), tpl base.TplName) { page := ctx.QueryInt("page") if page <= 0 { diff --git a/routers/repo/webhook.go b/routers/web/repo/webhook.go similarity index 97% rename from routers/repo/webhook.go rename to routers/web/repo/webhook.go index fe16d249eb0c..e8d86db51d7d 100644 --- a/routers/repo/webhook.go +++ b/routers/web/repo/webhook.go @@ -239,7 +239,7 @@ func GogsHooksNewPost(ctx *context.Context) { } // newGogsWebhookPost response for creating gogs hook -func newGogsWebhookPost(ctx *context.Context, form forms.NewGogshookForm, kind models.HookTaskType) { +func newGogsWebhookPost(ctx *context.Context, form forms.NewGogshookForm, kind models.HookType) { ctx.Data["Title"] = ctx.Tr("repo.settings.add_webhook") ctx.Data["PageIsSettingsHooks"] = true ctx.Data["PageIsSettingsHooksNew"] = 
true @@ -1085,28 +1085,30 @@ func TestWebhook(ctx *context.Context) { } apiUser := convert.ToUserWithAccessMode(ctx.User, models.AccessModeNone) - p := &api.PushPayload{ - Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch, - Before: commit.ID.String(), - After: commit.ID.String(), - Commits: []*api.PayloadCommit{ - { - ID: commit.ID.String(), - Message: commit.Message(), - URL: ctx.Repo.Repository.HTMLURL() + "/commit/" + commit.ID.String(), - Author: &api.PayloadUser{ - Name: commit.Author.Name, - Email: commit.Author.Email, - }, - Committer: &api.PayloadUser{ - Name: commit.Committer.Name, - Email: commit.Committer.Email, - }, - }, + + apiCommit := &api.PayloadCommit{ + ID: commit.ID.String(), + Message: commit.Message(), + URL: ctx.Repo.Repository.HTMLURL() + "/commit/" + commit.ID.String(), + Author: &api.PayloadUser{ + Name: commit.Author.Name, + Email: commit.Author.Email, + }, + Committer: &api.PayloadUser{ + Name: commit.Committer.Name, + Email: commit.Committer.Email, }, - Repo: convert.ToRepo(ctx.Repo.Repository, models.AccessModeNone), - Pusher: apiUser, - Sender: apiUser, + } + + p := &api.PushPayload{ + Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch, + Before: commit.ID.String(), + After: commit.ID.String(), + Commits: []*api.PayloadCommit{apiCommit}, + HeadCommit: apiCommit, + Repo: convert.ToRepo(ctx.Repo.Repository, models.AccessModeNone), + Pusher: apiUser, + Sender: apiUser, } if err := webhook.PrepareWebhook(w, ctx.Repo.Repository, models.HookEventPush, p); err != nil { ctx.Flash.Error("PrepareWebhook: " + err.Error()) diff --git a/routers/repo/wiki.go b/routers/web/repo/wiki.go similarity index 98% rename from routers/repo/wiki.go rename to routers/web/repo/wiki.go index 1bdd06dce57d..5271fe9b4ad9 100644 --- a/routers/repo/wiki.go +++ b/routers/web/repo/wiki.go @@ -21,9 +21,11 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/routers/common" "code.gitea.io/gitea/services/forms" wiki_service "code.gitea.io/gitea/services/wiki" ) @@ -315,7 +317,7 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) ctx.Data["Commits"] = commitsHistory - pager := context.NewPagination(int(commitsCount), git.CommitsRangeSize, page, 5) + pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5) pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager @@ -558,7 +560,7 @@ func WikiRaw(ctx *context.Context) { } if entry != nil { - if err = ServeBlob(ctx, entry.Blob()); err != nil { + if err = common.ServeBlob(ctx, entry.Blob()); err != nil { ctx.ServerError("ServeBlob", err) } return diff --git a/routers/repo/wiki_test.go b/routers/web/repo/wiki_test.go similarity index 98% rename from routers/repo/wiki_test.go rename to routers/web/repo/wiki_test.go index 4b28a5af86e0..8934a6619f0f 100644 --- a/routers/repo/wiki_test.go +++ b/routers/web/repo/wiki_test.go @@ -64,7 +64,7 @@ func assertPagesMetas(t *testing.T, expectedNames []string, metas interface{}) { if !assert.True(t, ok) { return } - if !assert.EqualValues(t, len(expectedNames), len(pageMetas)) { + if !assert.Len(t, pageMetas, len(expectedNames)) { return } for i, pageMeta := range pageMetas { diff --git a/routers/swagger_json.go b/routers/web/swagger_json.go similarity index 97% rename from routers/swagger_json.go 
rename to routers/web/swagger_json.go index 78c7fb1e24b3..82d72698c606 100644 --- a/routers/swagger_json.go +++ b/routers/web/swagger_json.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package routers +package web import ( "net/http" diff --git a/routers/user/auth.go b/routers/web/user/auth.go similarity index 96% rename from routers/user/auth.go rename to routers/web/user/auth.go index f29e1cc4d0af..4095d2956e3a 100644 --- a/routers/user/auth.go +++ b/routers/web/user/auth.go @@ -617,7 +617,7 @@ func SignInOAuthCallback(ctx *context.Context) { } if u == nil { - if setting.OAuth2Client.EnableAutoRegistration { + if !(setting.Service.DisableRegistration || setting.Service.AllowOnlyInternalRegistration) && setting.OAuth2Client.EnableAutoRegistration { // create new user with details from oauth2 provider var missingFields []string if gothUser.UserID == "" { @@ -828,6 +828,7 @@ func LinkAccount(ctx *context.Context) { ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration + ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["ShowRegistrationButton"] = false // use this to set the right link into the signIn and signUp templates in the link_account template @@ -982,18 +983,23 @@ func LinkAccountPostRegister(ctx *context.Context) { ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin" ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/link_account_signup" - gothUser := ctx.Session.Get("linkAccountGothUser") - if gothUser == nil { + gothUserInterface := ctx.Session.Get("linkAccountGothUser") + if gothUserInterface == nil { ctx.ServerError("UserSignUp", errors.New("not in LinkAccount session")) return } + gothUser, ok := gothUserInterface.(goth.User) + if !ok { + ctx.ServerError("UserSignUp", fmt.Errorf("session linkAccountGothUser type is %t but not goth.User", gothUserInterface)) + return + } if ctx.HasError() { ctx.HTML(http.StatusOK, tplLinkAccount) return } - if setting.Service.DisableRegistration { + if setting.Service.DisableRegistration || setting.Service.AllowOnlyInternalRegistration { ctx.Error(http.StatusForbidden) return } @@ -1005,9 +1011,9 @@ func LinkAccountPostRegister(ctx *context.Context) { case setting.ImageCaptcha: valid = context.GetImageCaptcha().VerifyReq(ctx.Req) case setting.ReCaptcha: - valid, err = recaptcha.Verify(ctx.Req.Context(), form.GRecaptchaResponse) + valid, err = recaptcha.Verify(ctx, form.GRecaptchaResponse) case setting.HCaptcha: - valid, err = hcaptcha.Verify(ctx.Req.Context(), form.HcaptchaResponse) + valid, err = hcaptcha.Verify(ctx, form.HcaptchaResponse) default: ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType)) return @@ -1048,7 +1054,7 @@ func LinkAccountPostRegister(ctx *context.Context) { } } - loginSource, err := models.GetActiveOAuth2LoginSourceByName(gothUser.(goth.User).Provider) + loginSource, err := models.GetActiveOAuth2LoginSourceByName(gothUser.Provider) if err != nil { ctx.ServerError("CreateUser", err) } @@ -1060,10 +1066,10 @@ func LinkAccountPostRegister(ctx *context.Context) { IsActive: !(setting.Service.RegisterEmailConfirm || setting.Service.RegisterManualConfirm), LoginType: models.LoginOAuth2, LoginSource: loginSource.ID, - LoginName: gothUser.(goth.User).UserID, + LoginName: gothUser.UserID, } 
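
[Editor's note] Earlier in this hunk, LinkAccountPostRegister stops blindly asserting the session value with gothUser.(goth.User) and switches to the comma-ok form, so a corrupted session yields a server error instead of a panic. A generic sketch of that checked-assertion pattern for values pulled from an untyped store follows; sessionGet and the Profile type are invented for illustration and are not Gitea or goth APIs.

```go
// Checked type assertion for a value retrieved from an interface{}-typed store.
// sessionGet and Profile are illustrative stand-ins only.
package main

import "fmt"

type Profile struct {
	Provider string
	UserID   string
}

// sessionGet mimics a session store that returns untyped values.
func sessionGet(store map[string]interface{}, key string) interface{} {
	return store[key]
}

func profileFromSession(store map[string]interface{}) (Profile, error) {
	raw := sessionGet(store, "linkAccountGothUser")
	if raw == nil {
		return Profile{}, fmt.Errorf("not in LinkAccount session")
	}
	p, ok := raw.(Profile) // comma-ok form: no panic on an unexpected type
	if !ok {
		return Profile{}, fmt.Errorf("session value is %T, not Profile", raw)
	}
	return p, nil
}

func main() {
	store := map[string]interface{}{
		"linkAccountGothUser": Profile{Provider: "github", UserID: "42"},
	}
	p, err := profileFromSession(store)
	fmt.Println(p, err)
}
```
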
- if !createAndHandleCreatedUser(ctx, tplLinkAccount, form, u, gothUser.(*goth.User), false) { + if !createAndHandleCreatedUser(ctx, tplLinkAccount, form, u, &gothUser, false) { // error already handled return } @@ -1147,9 +1153,9 @@ func SignUpPost(ctx *context.Context) { case setting.ImageCaptcha: valid = context.GetImageCaptcha().VerifyReq(ctx.Req) case setting.ReCaptcha: - valid, err = recaptcha.Verify(ctx.Req.Context(), form.GRecaptchaResponse) + valid, err = recaptcha.Verify(ctx, form.GRecaptchaResponse) case setting.HCaptcha: - valid, err = hcaptcha.Verify(ctx.Req.Context(), form.HcaptchaResponse) + valid, err = hcaptcha.Verify(ctx, form.HcaptchaResponse) default: ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType)) return @@ -1185,7 +1191,7 @@ func SignUpPost(ctx *context.Context) { ctx.RenderWithErr(password.BuildComplexityError(ctx), tplSignUp, &form) return } - pwned, err := password.IsPwned(ctx.Req.Context(), form.Password) + pwned, err := password.IsPwned(ctx, form.Password) if pwned { errMsg := ctx.Tr("auth.password_pwned") if err != nil { @@ -1423,16 +1429,22 @@ func handleAccountActivation(ctx *context.Context, user *models.User) { return } + if err := models.ActivateUserEmail(user.ID, user.Email, true); err != nil { + log.Error("Unable to activate email for user: %-v with email: %s: %v", user, user.Email, err) + ctx.ServerError("ActivateUserEmail", err) + return + } + log.Trace("User activated: %s", user.Name) if err := ctx.Session.Set("uid", user.ID); err != nil { - log.Error(fmt.Sprintf("Error setting uid in session: %v", err)) + log.Error("Error setting uid in session[%s]: %v", ctx.Session.ID(), err) } if err := ctx.Session.Set("uname", user.Name); err != nil { - log.Error(fmt.Sprintf("Error setting uname in session: %v", err)) + log.Error("Error setting uname in session[%s]: %v", ctx.Session.ID(), err) } if err := ctx.Session.Release(); err != nil { - log.Error("Error storing session: %v", err) + log.Error("Error storing session[%s]: %v", ctx.Session.ID(), err) } ctx.Flash.Success(ctx.Tr("auth.account_activated")) @@ -1467,11 +1479,12 @@ func ActivateEmail(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/user/settings/account") } -// ForgotPasswd render the forget pasword page +// ForgotPasswd render the forget password page func ForgotPasswd(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("auth.forgot_password_title") if setting.MailService == nil { + log.Warn(ctx.Tr("auth.disable_forgot_password_mail_admin")) ctx.Data["IsResetDisable"] = true ctx.HTML(http.StatusOK, tplForgotPassword) return @@ -1614,7 +1627,7 @@ func ResetPasswdPost(ctx *context.Context) { ctx.Data["Err_Password"] = true ctx.RenderWithErr(password.BuildComplexityError(ctx), tplResetPassword, nil) return - } else if pwned, err := password.IsPwned(ctx.Req.Context(), passwd); pwned || err != nil { + } else if pwned, err := password.IsPwned(ctx, passwd); pwned || err != nil { errMsg := ctx.Tr("auth.password_pwned") if err != nil { log.Error(err.Error()) diff --git a/routers/user/auth_openid.go b/routers/web/user/auth_openid.go similarity index 95% rename from routers/user/auth_openid.go rename to routers/web/user/auth_openid.go index 863fa6718477..1a73a08c4862 100644 --- a/routers/user/auth_openid.go +++ b/routers/web/user/auth_openid.go @@ -13,11 +13,11 @@ import ( "code.gitea.io/gitea/modules/auth/openid" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" - "code.gitea.io/gitea/modules/generate" 
"code.gitea.io/gitea/modules/hcaptcha" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/recaptcha" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/forms" @@ -249,7 +249,7 @@ func signInOpenIDVerify(ctx *context.Context) { log.Error("signInOpenIDVerify: Unable to save changes to the session: %v", err) } - if u != nil || !setting.Service.EnableOpenIDSignUp { + if u != nil || !setting.Service.EnableOpenIDSignUp || setting.Service.AllowOnlyInternalRegistration { ctx.Redirect(setting.AppSubURL + "/user/openid/connect") } else { ctx.Redirect(setting.AppSubURL + "/user/openid/register") @@ -267,6 +267,7 @@ func ConnectOpenID(ctx *context.Context) { ctx.Data["PageIsSignIn"] = true ctx.Data["PageIsOpenIDConnect"] = true ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp + ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["OpenID"] = oid userName, _ := ctx.Session.Get("openid_determined_username").(string) if userName != "" { @@ -328,6 +329,7 @@ func RegisterOpenID(ctx *context.Context) { ctx.Data["PageIsSignIn"] = true ctx.Data["PageIsOpenIDRegister"] = true ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp + ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha ctx.Data["Captcha"] = context.GetImageCaptcha() ctx.Data["CaptchaType"] = setting.Service.CaptchaType @@ -367,6 +369,11 @@ func RegisterOpenIDPost(ctx *context.Context) { ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey ctx.Data["OpenID"] = oid + if setting.Service.AllowOnlyInternalRegistration { + ctx.Error(http.StatusForbidden) + return + } + if setting.Service.EnableCaptcha { var valid bool var err error @@ -378,13 +385,13 @@ func RegisterOpenIDPost(ctx *context.Context) { ctx.ServerError("", err) return } - valid, err = recaptcha.Verify(ctx.Req.Context(), form.GRecaptchaResponse) + valid, err = recaptcha.Verify(ctx, form.GRecaptchaResponse) case setting.HCaptcha: if err := ctx.Req.ParseForm(); err != nil { ctx.ServerError("", err) return } - valid, err = hcaptcha.Verify(ctx.Req.Context(), form.HcaptchaResponse) + valid, err = hcaptcha.Verify(ctx, form.HcaptchaResponse) default: ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType)) return @@ -404,7 +411,7 @@ func RegisterOpenIDPost(ctx *context.Context) { if length < 256 { length = 256 } - password, err := generate.GetRandomString(length) + password, err := util.RandomString(int64(length)) if err != nil { ctx.RenderWithErr(err.Error(), tplSignUpOID, form) return diff --git a/routers/user/avatar.go b/routers/web/user/avatar.go similarity index 100% rename from routers/user/avatar.go rename to routers/web/user/avatar.go diff --git a/routers/user/home.go b/routers/web/user/home.go similarity index 99% rename from routers/user/home.go rename to routers/web/user/home.go index 82ccb805b0e2..5f06373c16f5 100644 --- a/routers/user/home.go +++ b/routers/web/user/home.go @@ -49,11 +49,12 @@ func getDashboardContextUser(ctx *context.Context) *models.User { } ctx.Data["ContextUser"] = ctxUser - if err := ctx.User.GetOrganizations(&models.SearchOrganizationsOptions{All: true}); err != nil { - ctx.ServerError("GetOrganizations", err) + orgs, err := models.GetUserOrgsList(ctx.User.ID) + if err != nil { + 
ctx.ServerError("GetUserOrgsList", err) return nil } - ctx.Data["Orgs"] = ctx.User.Orgs + ctx.Data["Orgs"] = orgs return ctxUser } diff --git a/routers/user/home_test.go b/routers/web/user/home_test.go similarity index 97% rename from routers/user/home_test.go rename to routers/web/user/home_test.go index ecc02fd33a83..b0109c354f43 100644 --- a/routers/user/home_test.go +++ b/routers/web/user/home_test.go @@ -33,9 +33,9 @@ func TestArchivedIssues(t *testing.T) { IsArchived[repo.ID] = repo.IsArchived NumIssues[repo.ID] = repo.NumIssues } - assert.EqualValues(t, false, IsArchived[50]) + assert.False(t, IsArchived[50]) assert.EqualValues(t, 1, NumIssues[50]) - assert.EqualValues(t, true, IsArchived[51]) + assert.True(t, IsArchived[51]) assert.EqualValues(t, 1, NumIssues[51]) // Act diff --git a/routers/user/main_test.go b/routers/web/user/main_test.go similarity index 84% rename from routers/user/main_test.go rename to routers/web/user/main_test.go index ed0724dc7733..be17dd1f3135 100644 --- a/routers/user/main_test.go +++ b/routers/web/user/main_test.go @@ -12,5 +12,5 @@ import ( ) func TestMain(m *testing.M) { - models.MainTest(m, filepath.Join("..", "..")) + models.MainTest(m, filepath.Join("..", "..", "..")) } diff --git a/routers/user/notification.go b/routers/web/user/notification.go similarity index 100% rename from routers/user/notification.go rename to routers/web/user/notification.go diff --git a/routers/user/oauth.go b/routers/web/user/oauth.go similarity index 78% rename from routers/user/oauth.go rename to routers/web/user/oauth.go index ae06efd0c016..72295b4447c2 100644 --- a/routers/user/oauth.go +++ b/routers/web/user/oauth.go @@ -13,16 +13,19 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/auth/oauth2" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/forms" "gitea.com/go-chi/binding" "github.com/dgrijalva/jwt-go" + jsoniter "github.com/json-iterator/go" ) const ( @@ -93,6 +96,24 @@ func (err AccessTokenError) Error() string { return fmt.Sprintf("%s: %s", err.ErrorCode, err.ErrorDescription) } +// BearerTokenErrorCode represents an error code specified in RFC 6750 +type BearerTokenErrorCode string + +const ( + // BearerTokenErrorCodeInvalidRequest represents an error code specified in RFC 6750 + BearerTokenErrorCodeInvalidRequest BearerTokenErrorCode = "invalid_request" + // BearerTokenErrorCodeInvalidToken represents an error code specified in RFC 6750 + BearerTokenErrorCodeInvalidToken BearerTokenErrorCode = "invalid_token" + // BearerTokenErrorCodeInsufficientScope represents an error code specified in RFC 6750 + BearerTokenErrorCodeInsufficientScope BearerTokenErrorCode = "insufficient_scope" +) + +// BearerTokenError represents an error response specified in RFC 6750 +type BearerTokenError struct { + ErrorCode BearerTokenErrorCode `json:"error" form:"error"` + ErrorDescription string `json:"error_description"` +} + // TokenType specifies the kind of token type TokenType string @@ -112,7 +133,7 @@ type AccessTokenResponse struct { IDToken string `json:"id_token,omitempty"` } -func newAccessTokenResponse(grant *models.OAuth2Grant, clientSecret string) (*AccessTokenResponse, *AccessTokenError) { +func newAccessTokenResponse(grant *models.OAuth2Grant, signingKey oauth2.JWTSigningKey) (*AccessTokenResponse, 
*AccessTokenError) { if setting.OAuth2.InvalidateRefreshTokens { if err := grant.IncreaseCounter(); err != nil { return nil, &AccessTokenError{ @@ -166,6 +187,21 @@ func newAccessTokenResponse(grant *models.OAuth2Grant, clientSecret string) (*Ac ErrorDescription: "cannot find application", } } + err = app.LoadUser() + if err != nil { + if models.IsErrUserNotExist(err) { + return nil, &AccessTokenError{ + ErrorCode: AccessTokenErrorCodeInvalidRequest, + ErrorDescription: "cannot find user", + } + } + log.Error("Error loading user: %v", err) + return nil, &AccessTokenError{ + ErrorCode: AccessTokenErrorCodeInvalidRequest, + ErrorDescription: "server error", + } + } + idToken := &models.OIDCToken{ StandardClaims: jwt.StandardClaims{ ExpiresAt: expirationDate.AsTime().Unix(), @@ -175,7 +211,21 @@ func newAccessTokenResponse(grant *models.OAuth2Grant, clientSecret string) (*Ac }, Nonce: grant.Nonce, } - signedIDToken, err = idToken.SignToken(clientSecret) + if grant.ScopeContains("profile") { + idToken.Name = app.User.FullName + idToken.PreferredUsername = app.User.Name + idToken.Profile = app.User.HTMLURL() + idToken.Picture = app.User.AvatarLink() + idToken.Website = app.User.Website + idToken.Locale = app.User.Language + idToken.UpdatedAt = app.User.UpdatedUnix + } + if grant.ScopeContains("email") { + idToken.Email = app.User.Email + idToken.EmailVerified = app.User.IsActive + } + + signedIDToken, err = idToken.SignToken(signingKey) if err != nil { return nil, &AccessTokenError{ ErrorCode: AccessTokenErrorCodeInvalidRequest, @@ -193,6 +243,45 @@ func newAccessTokenResponse(grant *models.OAuth2Grant, clientSecret string) (*Ac }, nil } +type userInfoResponse struct { + Sub string `json:"sub"` + Name string `json:"name"` + Username string `json:"preferred_username"` + Email string `json:"email"` + Picture string `json:"picture"` +} + +// InfoOAuth manages request for userinfo endpoint +func InfoOAuth(ctx *context.Context) { + header := ctx.Req.Header.Get("Authorization") + auths := strings.Fields(header) + if len(auths) != 2 || auths[0] != "Bearer" { + ctx.HandleText(http.StatusUnauthorized, "no valid auth token authorization") + return + } + uid := auth.CheckOAuthAccessToken(auths[1]) + if uid == 0 { + handleBearerTokenError(ctx, BearerTokenError{ + ErrorCode: BearerTokenErrorCodeInvalidToken, + ErrorDescription: "Access token not assigned to any user", + }) + return + } + authUser, err := models.GetUserByID(uid) + if err != nil { + ctx.ServerError("GetUserByID", err) + return + } + response := &userInfoResponse{ + Sub: fmt.Sprint(authUser.ID), + Name: authUser.FullName, + Username: authUser.Name, + Email: authUser.Email, + Picture: authUser.AvatarLink(), + } + ctx.JSON(http.StatusOK, response) +} + // AuthorizeOAuth manages authorize requests func AuthorizeOAuth(ctx *context.Context) { form := web.GetForm(ctx).(*forms.AuthorizationForm) @@ -393,12 +482,37 @@ func GrantApplicationOAuth(ctx *context.Context) { func OIDCWellKnown(ctx *context.Context) { t := ctx.Render.TemplateLookup("user/auth/oidc_wellknown") ctx.Resp.Header().Set("Content-Type", "application/json") + ctx.Data["SigningKey"] = oauth2.DefaultSigningKey if err := t.Execute(ctx.Resp, ctx.Data); err != nil { log.Error("%v", err) ctx.Error(http.StatusInternalServerError) } } +// OIDCKeys generates the JSON Web Key Set +func OIDCKeys(ctx *context.Context) { + jwk, err := oauth2.DefaultSigningKey.ToJWK() + if err != nil { + log.Error("Error converting signing key to JWK: %v", err) + ctx.Error(http.StatusInternalServerError) + return 
+ } + + jwk["use"] = "sig" + + jwks := map[string][]map[string]string{ + "keys": { + jwk, + }, + } + + ctx.Resp.Header().Set("Content-Type", "application/json") + enc := jsoniter.NewEncoder(ctx.Resp) + if err := enc.Encode(jwks); err != nil { + log.Error("Failed to encode representation as json. Error: %v", err) + } +} + // AccessTokenOAuth manages all access token requests by the client func AccessTokenOAuth(ctx *context.Context) { form := *web.GetForm(ctx).(*forms.AccessTokenForm) @@ -426,13 +540,25 @@ func AccessTokenOAuth(ctx *context.Context) { form.ClientSecret = pair[1] } } + + signingKey := oauth2.DefaultSigningKey + if signingKey.IsSymmetric() { + clientKey, err := oauth2.CreateJWTSingingKey(signingKey.SigningMethod().Alg(), []byte(form.ClientSecret)) + if err != nil { + handleAccessTokenError(ctx, AccessTokenError{ + ErrorCode: AccessTokenErrorCodeInvalidRequest, + ErrorDescription: "Error creating signing key", + }) + return + } + signingKey = clientKey + } + switch form.GrantType { case "refresh_token": - handleRefreshToken(ctx, form) - return + handleRefreshToken(ctx, form, signingKey) case "authorization_code": - handleAuthorizationCode(ctx, form) - return + handleAuthorizationCode(ctx, form, signingKey) default: handleAccessTokenError(ctx, AccessTokenError{ ErrorCode: AccessTokenErrorCodeUnsupportedGrantType, @@ -441,7 +567,7 @@ func AccessTokenOAuth(ctx *context.Context) { } } -func handleRefreshToken(ctx *context.Context, form forms.AccessTokenForm) { +func handleRefreshToken(ctx *context.Context, form forms.AccessTokenForm, signingKey oauth2.JWTSigningKey) { token, err := models.ParseOAuth2Token(form.RefreshToken) if err != nil { handleAccessTokenError(ctx, AccessTokenError{ @@ -469,7 +595,7 @@ func handleRefreshToken(ctx *context.Context, form forms.AccessTokenForm) { log.Warn("A client tried to use a refresh token for grant_id = %d was used twice!", grant.ID) return } - accessToken, tokenErr := newAccessTokenResponse(grant, form.ClientSecret) + accessToken, tokenErr := newAccessTokenResponse(grant, signingKey) if tokenErr != nil { handleAccessTokenError(ctx, *tokenErr) return @@ -477,7 +603,7 @@ func handleRefreshToken(ctx *context.Context, form forms.AccessTokenForm) { ctx.JSON(http.StatusOK, accessToken) } -func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm) { +func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm, signingKey oauth2.JWTSigningKey) { app, err := models.GetOAuth2ApplicationByClientID(form.ClientID) if err != nil { handleAccessTokenError(ctx, AccessTokenError{ @@ -531,7 +657,7 @@ func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm) { ErrorDescription: "cannot proceed your request", }) } - resp, tokenErr := newAccessTokenResponse(authorizationCode.Grant, form.ClientSecret) + resp, tokenErr := newAccessTokenResponse(authorizationCode.Grant, signingKey) if tokenErr != nil { handleAccessTokenError(ctx, *tokenErr) return @@ -571,3 +697,18 @@ func handleAuthorizeError(ctx *context.Context, authErr AuthorizeError, redirect redirect.RawQuery = q.Encode() ctx.Redirect(redirect.String(), 302) } + +func handleBearerTokenError(ctx *context.Context, beErr BearerTokenError) { + ctx.Resp.Header().Set("WWW-Authenticate", fmt.Sprintf("Bearer realm=\"\", error=\"%s\", error_description=\"%s\"", beErr.ErrorCode, beErr.ErrorDescription)) + switch beErr.ErrorCode { + case BearerTokenErrorCodeInvalidRequest: + ctx.JSON(http.StatusBadRequest, beErr) + case BearerTokenErrorCodeInvalidToken: + 
ctx.JSON(http.StatusUnauthorized, beErr) + case BearerTokenErrorCodeInsufficientScope: + ctx.JSON(http.StatusForbidden, beErr) + default: + log.Error("Invalid BearerTokenErrorCode: %v", beErr.ErrorCode) + ctx.ServerError("Unhandled BearerTokenError", fmt.Errorf("BearerTokenError: error=\"%v\", error_description=\"%v\"", beErr.ErrorCode, beErr.ErrorDescription)) + } +} diff --git a/routers/user/profile.go b/routers/web/user/profile.go similarity index 97% rename from routers/user/profile.go rename to routers/web/user/profile.go index 8ff1ee24adc8..631ca2113512 100644 --- a/routers/user/profile.go +++ b/routers/web/user/profile.go @@ -17,7 +17,7 @@ import ( "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/routers/org" + "code.gitea.io/gitea/routers/web/org" ) // GetUserByName get user by name @@ -75,6 +75,17 @@ func Profile(ctx *context.Context) { return } + if ctxUser.IsOrganization() { + org.Home(ctx) + return + } + + // check view permissions + if !ctxUser.IsVisibleToUser(ctx.User) { + ctx.NotFound("user", fmt.Errorf(uname)) + return + } + // Show SSH keys. if isShowKeys { ShowSSHKeys(ctx, ctxUser.ID) @@ -87,11 +98,6 @@ func Profile(ctx *context.Context) { return } - if ctxUser.IsOrganization() { - org.Home(ctx) - return - } - // Show OpenID URIs openIDs, err := models.GetUserOpenIDs(ctxUser.ID) if err != nil { @@ -117,6 +123,7 @@ func Profile(ctx *context.Context) { content, err := markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: map[string]string{"mode": "document"}, + GitRepo: ctx.Repo.GitRepo, }, ctxUser.Description) if err != nil { ctx.ServerError("RenderString", err) diff --git a/routers/user/setting/account.go b/routers/web/user/setting/account.go similarity index 90% rename from routers/user/setting/account.go rename to routers/web/user/setting/account.go index e12d63ee029e..b805db620083 100644 --- a/routers/user/setting/account.go +++ b/routers/web/user/setting/account.go @@ -58,7 +58,7 @@ func AccountPost(ctx *context.Context) { ctx.Flash.Error(ctx.Tr("form.password_not_match")) } else if !password.IsComplexEnough(form.Password) { ctx.Flash.Error(password.BuildComplexityError(ctx)) - } else if pwned, err := password.IsPwned(ctx.Req.Context(), form.Password); pwned || err != nil { + } else if pwned, err := password.IsPwned(ctx, form.Password); pwned || err != nil { errMsg := ctx.Tr("auth.password_pwned") if err != nil { log.Error(err.Error()) @@ -107,35 +107,36 @@ func EmailPost(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } - if ctx.Query("id") == "PRIMARY" { - if ctx.User.IsActive { - log.Error("Send activation: email not set for activation") + + id := ctx.QueryInt64("id") + email, err := models.GetEmailAddressByID(ctx.User.ID, id) + if err != nil { + log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.User.ID, id, err) + ctx.Redirect(setting.AppSubURL + "/user/settings/account") + return + } + if email == nil { + log.Warn("Send activation failed: EmailAddress[%d] not found for user: %-v", id, ctx.User) + ctx.Redirect(setting.AppSubURL + "/user/settings/account") + return + } + if email.IsActivated { + log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.User) + ctx.Redirect(setting.AppSubURL + "/user/settings/account") + return + } + if email.IsPrimary { + if ctx.User.IsActive && !setting.Service.RegisterEmailConfirm { + log.Debug("Send activation failed: email 
%s is already activated for user: %-v", email.Email, ctx.User) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } + // Only fired when the primary email is inactive (Wrong state) mailer.SendActivateAccountMail(ctx.Locale, ctx.User) - address = ctx.User.Email } else { - id := ctx.QueryInt64("id") - email, err := models.GetEmailAddressByID(ctx.User.ID, id) - if err != nil { - log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.User.ID, id, err) - ctx.Redirect(setting.AppSubURL + "/user/settings/account") - return - } - if email == nil { - log.Error("Send activation: EmailAddress not found; user:%d, id: %d", ctx.User.ID, id) - ctx.Redirect(setting.AppSubURL + "/user/settings/account") - return - } - if email.IsActivated { - log.Error("Send activation: email not set for activation") - ctx.Redirect(setting.AppSubURL + "/user/settings/account") - return - } mailer.SendActivateEmailMail(ctx.User, email) - address = email.Email } + address = email.Email if err := ctx.Cache.Put("MailResendLimit_"+ctx.User.LowerName, ctx.User.LowerName, 180); err != nil { log.Error("Set cache(MailResendLimit) fail: %v", err) diff --git a/routers/user/setting/account_test.go b/routers/web/user/setting/account_test.go similarity index 100% rename from routers/user/setting/account_test.go rename to routers/web/user/setting/account_test.go diff --git a/routers/user/setting/adopt.go b/routers/web/user/setting/adopt.go similarity index 100% rename from routers/user/setting/adopt.go rename to routers/web/user/setting/adopt.go diff --git a/routers/user/setting/applications.go b/routers/web/user/setting/applications.go similarity index 100% rename from routers/user/setting/applications.go rename to routers/web/user/setting/applications.go diff --git a/routers/user/setting/keys.go b/routers/web/user/setting/keys.go similarity index 78% rename from routers/user/setting/keys.go rename to routers/web/user/setting/keys.go index e56a33afcb34..d875d84a7603 100644 --- a/routers/user/setting/keys.go +++ b/routers/web/user/setting/keys.go @@ -76,7 +76,13 @@ func KeysPost(ctx *context.Context) { ctx.Flash.Success(ctx.Tr("settings.add_principal_success", form.Content)) ctx.Redirect(setting.AppSubURL + "/user/settings/keys") case "gpg": - keys, err := models.AddGPGKey(ctx.User.ID, form.Content) + token := models.VerificationToken(ctx.User, 1) + lastToken := models.VerificationToken(ctx.User, 0) + + keys, err := models.AddGPGKey(ctx.User.ID, form.Content, token, form.Signature) + if err != nil && models.IsErrGPGInvalidTokenSignature(err) { + keys, err = models.AddGPGKey(ctx.User.ID, form.Content, lastToken, form.Signature) + } if err != nil { ctx.Data["HasGPGError"] = true switch { @@ -88,10 +94,18 @@ func KeysPost(ctx *context.Context) { ctx.Data["Err_Content"] = true ctx.RenderWithErr(ctx.Tr("settings.gpg_key_id_used"), tplSettingsKeys, &form) + case models.IsErrGPGInvalidTokenSignature(err): + loadKeysData(ctx) + ctx.Data["Err_Content"] = true + ctx.Data["Err_Signature"] = true + ctx.Data["KeyID"] = err.(models.ErrGPGInvalidTokenSignature).ID + ctx.RenderWithErr(ctx.Tr("settings.gpg_invalid_token_signature"), tplSettingsKeys, &form) case models.IsErrGPGNoEmailFound(err): loadKeysData(ctx) ctx.Data["Err_Content"] = true + ctx.Data["Err_Signature"] = true + ctx.Data["KeyID"] = err.(models.ErrGPGNoEmailFound).ID ctx.RenderWithErr(ctx.Tr("settings.gpg_no_key_email_found"), tplSettingsKeys, &form) default: ctx.ServerError("AddPublicKey", err) @@ -108,6 +122,29 @@ func KeysPost(ctx *context.Context) { } 
ctx.Flash.Success(ctx.Tr("settings.add_gpg_key_success", keyIDs)) ctx.Redirect(setting.AppSubURL + "/user/settings/keys") + case "verify_gpg": + token := models.VerificationToken(ctx.User, 1) + lastToken := models.VerificationToken(ctx.User, 0) + + keyID, err := models.VerifyGPGKey(ctx.User.ID, form.KeyID, token, form.Signature) + if err != nil && models.IsErrGPGInvalidTokenSignature(err) { + keyID, err = models.VerifyGPGKey(ctx.User.ID, form.KeyID, lastToken, form.Signature) + } + if err != nil { + ctx.Data["HasGPGVerifyError"] = true + switch { + case models.IsErrGPGInvalidTokenSignature(err): + loadKeysData(ctx) + ctx.Data["VerifyingID"] = form.KeyID + ctx.Data["Err_Signature"] = true + ctx.Data["KeyID"] = err.(models.ErrGPGInvalidTokenSignature).ID + ctx.RenderWithErr(ctx.Tr("settings.gpg_invalid_token_signature"), tplSettingsKeys, &form) + default: + ctx.ServerError("VerifyGPG", err) + } + } + ctx.Flash.Success(ctx.Tr("settings.verify_gpg_key_success", keyID)) + ctx.Redirect(setting.AppSubURL + "/user/settings/keys") case "ssh": content, err := models.CheckPublicKeyString(form.Content) if err != nil { @@ -216,6 +253,10 @@ func loadKeysData(ctx *context.Context) { return } ctx.Data["GPGKeys"] = gpgkeys + tokenToSign := models.VerificationToken(ctx.User, 1) + + // generate a new aes cipher using the csrfToken + ctx.Data["TokenToSign"] = tokenToSign principals, err := models.ListPrincipalKeys(ctx.User.ID, models.ListOptions{}) if err != nil { @@ -223,4 +264,6 @@ func loadKeysData(ctx *context.Context) { return } ctx.Data["Principals"] = principals + + ctx.Data["VerifyingID"] = ctx.Query("verify_gpg") } diff --git a/routers/admin/main_test.go b/routers/web/user/setting/main_test.go similarity index 78% rename from routers/admin/main_test.go rename to routers/web/user/setting/main_test.go index 9a7191d471f5..daa3f7fe5bf1 100644 --- a/routers/admin/main_test.go +++ b/routers/web/user/setting/main_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
-package admin +package setting import ( "path/filepath" @@ -12,5 +12,5 @@ import ( ) func TestMain(m *testing.M) { - models.MainTest(m, filepath.Join("..", "..")) + models.MainTest(m, filepath.Join("..", "..", "..", "..")) } diff --git a/routers/user/setting/oauth2.go b/routers/web/user/setting/oauth2.go similarity index 100% rename from routers/user/setting/oauth2.go rename to routers/web/user/setting/oauth2.go diff --git a/routers/user/setting/profile.go b/routers/web/user/setting/profile.go similarity index 94% rename from routers/user/setting/profile.go rename to routers/web/user/setting/profile.go index 0bc2b4ee36b9..682f9205784e 100644 --- a/routers/user/setting/profile.go +++ b/routers/web/user/setting/profile.go @@ -19,6 +19,7 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" @@ -37,6 +38,7 @@ const ( func Profile(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsProfile"] = true + ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice() ctx.HTML(http.StatusOK, tplSettingsProfile) } @@ -68,8 +70,13 @@ func HandleUsernameChange(ctx *context.Context, user *models.User, newName strin } return err } - log.Trace("User name changed: %s -> %s", user.Name, newName) + } else { + if err := models.UpdateRepositoryOwnerNames(user.ID, newName); err != nil { + ctx.ServerError("UpdateRepository", err) + return err + } } + log.Trace("User name changed: %s -> %s", user.Name, newName) return nil } @@ -85,6 +92,7 @@ func ProfilePost(ctx *context.Context) { } if len(form.Name) != 0 && ctx.User.Name != form.Name { + log.Debug("Changing name for %s to %s", ctx.User.Name, form.Name) if err := HandleUsernameChange(ctx, ctx.User, form.Name); err != nil { ctx.Redirect(setting.AppSubURL + "/user/settings") return @@ -107,6 +115,7 @@ func ProfilePost(ctx *context.Context) { } ctx.User.Description = form.Description ctx.User.KeepActivityPrivate = form.KeepActivityPrivate + ctx.User.Visibility = form.Visibility if err := models.UpdateUserSetting(ctx.User); err != nil { if _, ok := err.(models.ErrEmailAlreadyUsed); ok { ctx.Flash.Error(ctx.Tr("form.email_been_used")) @@ -153,7 +162,9 @@ func UpdateAvatarSetting(ctx *context.Context, form *forms.AvatarForm, ctxUser * if err != nil { return fmt.Errorf("ioutil.ReadAll: %v", err) } - if !base.IsImageFile(data) { + + st := typesniffer.DetectContentType(data) + if !(st.IsImage() && !st.IsSvgImage()) { return errors.New(ctx.Tr("settings.uploaded_avatar_not_a_image")) } if err = ctxUser.UploadAvatar(data); err != nil { diff --git a/routers/user/setting/security.go b/routers/web/user/setting/security.go similarity index 100% rename from routers/user/setting/security.go rename to routers/web/user/setting/security.go diff --git a/routers/user/setting/security_openid.go b/routers/web/user/setting/security_openid.go similarity index 100% rename from routers/user/setting/security_openid.go rename to routers/web/user/setting/security_openid.go diff --git a/routers/user/setting/security_twofa.go b/routers/web/user/setting/security_twofa.go similarity index 100% rename from routers/user/setting/security_twofa.go rename to routers/web/user/setting/security_twofa.go diff --git a/routers/user/setting/security_u2f.go b/routers/web/user/setting/security_u2f.go 
similarity index 100% rename from routers/user/setting/security_u2f.go rename to routers/web/user/setting/security_u2f.go diff --git a/routers/user/task.go b/routers/web/user/task.go similarity index 55% rename from routers/user/task.go rename to routers/web/user/task.go index b8df5d99c7b4..8e7b66ef9538 100644 --- a/routers/user/task.go +++ b/routers/web/user/task.go @@ -9,6 +9,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" + jsoniter "github.com/json-iterator/go" ) // TaskStatus returns task's status @@ -21,9 +22,24 @@ func TaskStatus(ctx *context.Context) { return } + message := task.Message + + if task.Message != "" && task.Message[0] == '{' { + // assume message is actually a translatable string + json := jsoniter.ConfigCompatibleWithStandardLibrary + var translatableMessage models.TranslatableMessage + if err := json.Unmarshal([]byte(message), &translatableMessage); err != nil { + translatableMessage = models.TranslatableMessage{ + Format: "migrate.migrating_failed.error", + Args: []interface{}{task.Message}, + } + } + message = ctx.Tr(translatableMessage.Format, translatableMessage.Args...) + } + ctx.JSON(http.StatusOK, map[string]interface{}{ "status": task.Status, - "err": task.Errors, + "message": message, "repo-id": task.RepoID, "repo-name": opts.RepoName, "start": task.StartTime, diff --git a/routers/routes/web.go b/routers/web/web.go similarity index 85% rename from routers/routes/web.go rename to routers/web/web.go index 247083cf7468..aefcef184fc6 100644 --- a/routers/routes/web.go +++ b/routers/web/web.go @@ -2,16 +2,13 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package routes +package web import ( "encoding/gob" - "fmt" "net/http" - "net/url" "os" "path" - "strings" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" @@ -22,20 +19,18 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/templates" - "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/validation" "code.gitea.io/gitea/modules/web" - "code.gitea.io/gitea/routers" - "code.gitea.io/gitea/routers/admin" - apiv1 "code.gitea.io/gitea/routers/api/v1" "code.gitea.io/gitea/routers/api/v1/misc" - "code.gitea.io/gitea/routers/dev" - "code.gitea.io/gitea/routers/events" - "code.gitea.io/gitea/routers/org" - "code.gitea.io/gitea/routers/private" - "code.gitea.io/gitea/routers/repo" - "code.gitea.io/gitea/routers/user" - userSetting "code.gitea.io/gitea/routers/user/setting" + "code.gitea.io/gitea/routers/web/admin" + "code.gitea.io/gitea/routers/web/dev" + "code.gitea.io/gitea/routers/web/events" + "code.gitea.io/gitea/routers/web/explore" + "code.gitea.io/gitea/routers/web/org" + "code.gitea.io/gitea/routers/web/repo" + "code.gitea.io/gitea/routers/web/user" + userSetting "code.gitea.io/gitea/routers/web/user/setting" + "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/lfs" "code.gitea.io/gitea/services/mailer" @@ -46,12 +41,10 @@ import ( "gitea.com/go-chi/captcha" "gitea.com/go-chi/session" "github.com/NYTimes/gziphandler" - "github.com/chi-middleware/proxy" "github.com/go-chi/chi/middleware" "github.com/go-chi/cors" "github.com/prometheus/client_golang/prometheus" "github.com/tstranex/u2f" - "github.com/unknwon/com" ) const ( @@ -59,78 +52,35 @@ const ( GzipMinSize = 1400 ) -func commonMiddlewares() []func(http.Handler) http.Handler { - var handlers = []func(http.Handler) 
http.Handler{ - func(next http.Handler) http.Handler { - return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - next.ServeHTTP(context.NewResponse(resp), req) - }) - }, - } - - if setting.ReverseProxyLimit > 0 { - opt := proxy.NewForwardedHeadersOptions(). - WithForwardLimit(setting.ReverseProxyLimit). - ClearTrustedProxies() - for _, n := range setting.ReverseProxyTrustedProxies { - if !strings.Contains(n, "/") { - opt.AddTrustedProxy(n) - } else { - opt.AddTrustedNetwork(n) - } - } - handlers = append(handlers, proxy.ForwardedHeaders(opt)) +// CorsHandler return a http handler who set CORS options if enabled by config +func CorsHandler() func(next http.Handler) http.Handler { + if setting.CORSConfig.Enabled { + return cors.Handler(cors.Options{ + //Scheme: setting.CORSConfig.Scheme, // FIXME: the cors middleware needs scheme option + AllowedOrigins: setting.CORSConfig.AllowDomain, + //setting.CORSConfig.AllowSubdomain // FIXME: the cors middleware needs allowSubdomain option + AllowedMethods: setting.CORSConfig.Methods, + AllowCredentials: setting.CORSConfig.AllowCredentials, + MaxAge: int(setting.CORSConfig.MaxAge.Seconds()), + }) } - handlers = append(handlers, middleware.StripSlashes) - - if !setting.DisableRouterLog && setting.RouterLogLevel != log.NONE { - if log.GetLogger("router").GetLevel() <= setting.RouterLogLevel { - handlers = append(handlers, LoggerHandler(setting.RouterLogLevel)) - } + return func(next http.Handler) http.Handler { + return next } - - handlers = append(handlers, func(next http.Handler) http.Handler { - return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - // Why we need this? The Recovery() will try to render a beautiful - // error page for user, but the process can still panic again, and other - // middleware like session also may panic then we have to recover twice - // and send a simple error page that should not panic any more. 
- defer func() { - if err := recover(); err != nil { - combinedErr := fmt.Sprintf("PANIC: %v\n%s", err, string(log.Stack(2))) - log.Error("%v", combinedErr) - if setting.IsProd() { - http.Error(resp, http.StatusText(500), 500) - } else { - http.Error(resp, combinedErr, 500) - } - } - }() - next.ServeHTTP(resp, req) - }) - }) - return handlers } -// NormalRoutes represents non install routes -func NormalRoutes() *web.Route { - r := web.NewRoute() - for _, middle := range commonMiddlewares() { - r.Use(middle) - } +// Routes returns all web routes +func Routes() *web.Route { + routes := web.NewRoute() - r.Mount("/", WebRoutes()) - r.Mount("/api/v1", apiv1.Routes()) - r.Mount("/api/internal", private.Routes()) - return r -} - -// WebRoutes returns all web routes -func WebRoutes() *web.Route { - r := web.NewRoute() + routes.Use(public.AssetsHandler(&public.Options{ + Directory: path.Join(setting.StaticRootPath, "public"), + Prefix: "/assets", + CorsHandler: CorsHandler(), + })) - r.Use(session.Sessioner(session.Options{ + routes.Use(session.Sessioner(session.Options{ Provider: setting.SessionConfig.Provider, ProviderConfig: setting.SessionConfig.ProviderConfig, CookieName: setting.SessionConfig.CookieName, @@ -138,157 +88,91 @@ func WebRoutes() *web.Route { Gclifetime: setting.SessionConfig.Gclifetime, Maxlifetime: setting.SessionConfig.Maxlifetime, Secure: setting.SessionConfig.Secure, + SameSite: setting.SessionConfig.SameSite, Domain: setting.SessionConfig.Domain, })) - r.Use(Recovery()) + routes.Use(Recovery()) - r.Use(public.Custom( - &public.Options{ - SkipLogging: setting.DisableRouterLog, - }, - )) - r.Use(public.Static( - &public.Options{ - Directory: path.Join(setting.StaticRootPath, "public"), - SkipLogging: setting.DisableRouterLog, - Prefix: "/assets", - }, - )) + // We use r.Route here over r.Use because this prevents requests that are not for avatars having to go through this additional handler + routes.Route("/avatars/*", "GET, HEAD", storageHandler(setting.Avatar.Storage, "avatars", storage.Avatars)) + routes.Route("/repo-avatars/*", "GET, HEAD", storageHandler(setting.RepoAvatar.Storage, "repo-avatars", storage.RepoAvatars)) - r.Use(storageHandler(setting.Avatar.Storage, "avatars", storage.Avatars)) - r.Use(storageHandler(setting.RepoAvatar.Storage, "repo-avatars", storage.RepoAvatars)) + // for health check - doeesn't need to be passed through gzip handler + routes.Head("/", func(w http.ResponseWriter, req *http.Request) { + w.WriteHeader(http.StatusOK) + }) + + // this png is very likely to always be below the limit for gzip so it doesn't need to pass through gzip + routes.Get("/apple-touch-icon.png", func(w http.ResponseWriter, req *http.Request) { + http.Redirect(w, req, path.Join(setting.StaticURLPrefix, "/assets/img/apple-touch-icon.png"), 301) + }) gob.Register(&u2f.Challenge{}) + common := []interface{}{} + if setting.EnableGzip { h, err := gziphandler.GzipHandlerWithOpts(gziphandler.MinSize(GzipMinSize)) if err != nil { log.Fatal("GzipHandlerWithOpts failed: %v", err) } - r.Use(h) + common = append(common, h) } mailer.InitMailRender(templates.Mailer()) if setting.Service.EnableCaptcha { - r.Use(captcha.Captchaer(context.GetImageCaptcha())) - } - // Removed: toolbox.Toolboxer middleware will provide debug informations which seems unnecessary - r.Use(context.Contexter()) - // GetHead allows a HEAD request redirect to GET if HEAD method is not defined for that route - r.Use(middleware.GetHead) - - if setting.EnableAccessLog { - r.Use(context.AccessLogger()) + // The 
captcha http.Handler should only fire on /captcha/* so we can just mount this on that url + routes.Route("/captcha/*", "GET,HEAD", append(common, captcha.Captchaer(context.GetImageCaptcha()))...) } - r.Use(user.GetNotificationCount) - r.Use(repo.GetActiveStopwatch) - r.Use(func(ctx *context.Context) { - ctx.Data["UnitWikiGlobalDisabled"] = models.UnitTypeWiki.UnitGlobalDisabled() - ctx.Data["UnitIssuesGlobalDisabled"] = models.UnitTypeIssues.UnitGlobalDisabled() - ctx.Data["UnitPullsGlobalDisabled"] = models.UnitTypePullRequests.UnitGlobalDisabled() - ctx.Data["UnitProjectsGlobalDisabled"] = models.UnitTypeProjects.UnitGlobalDisabled() - }) - - // for health check - r.Head("/", func(w http.ResponseWriter, req *http.Request) { - w.WriteHeader(http.StatusOK) - }) - if setting.HasRobotsTxt { - r.Get("/robots.txt", func(w http.ResponseWriter, req *http.Request) { + routes.Get("/robots.txt", append(common, func(w http.ResponseWriter, req *http.Request) { filePath := path.Join(setting.CustomPath, "robots.txt") fi, err := os.Stat(filePath) if err == nil && httpcache.HandleTimeCache(req, w, fi) { return } http.ServeFile(w, req, filePath) - }) + })...) } - r.Get("/apple-touch-icon.png", func(w http.ResponseWriter, req *http.Request) { - http.Redirect(w, req, path.Join(setting.StaticURLPrefix, "img/apple-touch-icon.png"), 301) - }) - - // prometheus metrics endpoint + // prometheus metrics endpoint - do not need to go through contexter if setting.Metrics.Enabled { c := metrics.NewCollector() prometheus.MustRegister(c) - r.Get("/metrics", routers.Metrics) + routes.Get("/metrics", append(common, Metrics)...) } - if setting.API.EnableSwagger { - // Note: The route moved from apiroutes because it's in fact want to render a web page - r.Get("/api/swagger", misc.Swagger) // Render V1 by default - } + // Removed: toolbox.Toolboxer middleware will provide debug information which seems unnecessary + common = append(common, context.Contexter()) - RegisterRoutes(r) + // Get user from session if logged in. + common = append(common, context.Auth(auth.NewGroup(auth.Methods()...))) - return r -} + // GetHead allows a HEAD request redirect to GET if HEAD method is not defined for that route + common = append(common, middleware.GetHead) -func goGet(ctx *context.Context) { - if ctx.Query("go-get") != "1" { - return + if setting.API.EnableSwagger { + // Note: The route moved from apiroutes because it's in fact want to render a web page + routes.Get("/api/swagger", append(common, misc.Swagger)...) // Render V1 by default } - // Quick responses appropriate go-get meta with status 200 - // regardless of if user have access to the repository, - // or the repository does not exist at all. - // This is particular a workaround for "go get" command which does not respect - // .netrc file. 
- - ownerName := ctx.Params(":username") - repoName := ctx.Params(":reponame") - trimmedRepoName := strings.TrimSuffix(repoName, ".git") - - if ownerName == "" || trimmedRepoName == "" { - _, _ = ctx.Write([]byte(` - - - invalid import path - - -`)) - ctx.Status(400) - return - } - branchName := setting.Repository.DefaultBranch + // TODO: These really seem like things that could be folded into Contexter or as helper functions + common = append(common, user.GetNotificationCount) + common = append(common, repo.GetActiveStopwatch) + common = append(common, goGet) - repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName) - if err == nil && len(repo.DefaultBranch) > 0 { - branchName = repo.DefaultBranch + others := web.NewRoute() + for _, middle := range common { + others.Use(middle) } - prefix := setting.AppURL + path.Join(url.PathEscape(ownerName), url.PathEscape(repoName), "src", "branch", util.PathEscapeSegments(branchName)) - appURL, _ := url.Parse(setting.AppURL) - - insecure := "" - if appURL.Scheme == string(setting.HTTP) { - insecure = "--insecure " - } - ctx.Header().Set("Content-Type", "text/html") - ctx.Status(http.StatusOK) - _, _ = ctx.Write([]byte(com.Expand(` - - - - - - - go get {Insecure}{GoGetImport} - - -`, map[string]string{ - "GoGetImport": context.ComposeGoGetImport(ownerName, trimmedRepoName), - "CloneLink": models.ComposeHTTPSCloneURL(ownerName, repoName), - "GoDocDirectory": prefix + "{/dir}", - "GoDocFile": prefix + "{/dir}/{file}#L{line}", - "Insecure": insecure, - }))) + RegisterRoutes(others) + routes.Mount("", others) + return routes } // RegisterRoutes register routes @@ -332,20 +216,27 @@ func RegisterRoutes(m *web.Route) { } } + lfsServerEnabled := func(ctx *context.Context) { + if !setting.LFS.StartServer { + ctx.Error(http.StatusNotFound) + return + } + } + // FIXME: not all routes need go through same middleware. // Especially some AJAX requests, we can reduce middleware number to improve performance. // Routers. 
// for health check - m.Get("/", ignSignIn, routers.Home) + m.Get("/", ignSignIn, Home) m.Get("/.well-known/openid-configuration", user.OIDCWellKnown) m.Group("/explore", func() { m.Get("", func(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/explore/repos") }) - m.Get("/repos", routers.ExploreRepos) - m.Get("/users", routers.ExploreUsers) - m.Get("/organizations", routers.ExploreOrganizations) - m.Get("/code", routers.ExploreCode) + m.Get("/repos", explore.Repos) + m.Get("/users", explore.Users) + m.Get("/organizations", explore.Organizations) + m.Get("/code", explore.Code) }, ignExploreSignIn) m.Get("/issues", reqSignIn, user.Issues) m.Get("/pulls", reqSignIn, user.Pulls) @@ -401,18 +292,9 @@ func RegisterRoutes(m *web.Route) { // TODO manage redirection m.Post("/authorize", bindIgnErr(forms.AuthorizationForm{}), user.AuthorizeOAuth) }, ignSignInAndCsrf, reqSignIn) - if setting.CORSConfig.Enabled { - m.Post("/login/oauth/access_token", cors.Handler(cors.Options{ - //Scheme: setting.CORSConfig.Scheme, // FIXME: the cors middleware needs scheme option - AllowedOrigins: setting.CORSConfig.AllowDomain, - //setting.CORSConfig.AllowSubdomain // FIXME: the cors middleware needs allowSubdomain option - AllowedMethods: setting.CORSConfig.Methods, - AllowCredentials: setting.CORSConfig.AllowCredentials, - MaxAge: int(setting.CORSConfig.MaxAge.Seconds()), - }), bindIgnErr(forms.AccessTokenForm{}), ignSignInAndCsrf, user.AccessTokenOAuth) - } else { - m.Post("/login/oauth/access_token", bindIgnErr(forms.AccessTokenForm{}), ignSignInAndCsrf, user.AccessTokenOAuth) - } + m.Get("/login/oauth/userinfo", ignSignInAndCsrf, user.InfoOAuth) + m.Post("/login/oauth/access_token", CorsHandler(), bindIgnErr(forms.AccessTokenForm{}), ignSignInAndCsrf, user.AccessTokenOAuth) + m.Get("/login/oauth/keys", ignSignInAndCsrf, user.OIDCKeys) m.Group("/user/settings", func() { m.Get("", userSetting.Profile) @@ -712,12 +594,21 @@ func RegisterRoutes(m *web.Route) { m.Post("/delete", repo.DeleteTeam) }) }) + m.Group("/branches", func() { m.Combo("").Get(repo.ProtectedBranch).Post(repo.ProtectedBranchPost) m.Combo("/*").Get(repo.SettingsProtectedBranch). Post(bindIgnErr(forms.ProtectBranchForm{}), context.RepoMustNotBeArchived(), repo.SettingsProtectedBranchPost) }, repo.MustBeNotEmpty) + m.Group("/tags", func() { + m.Get("", repo.Tags) + m.Post("", bindIgnErr(forms.ProtectTagForm{}), context.RepoMustNotBeArchived(), repo.NewProtectedTagPost) + m.Post("/delete", context.RepoMustNotBeArchived(), repo.DeleteProtectedTagPost) + m.Get("/{id}", repo.EditProtectedTag) + m.Post("/{id}", bindIgnErr(forms.ProtectTagForm{}), context.RepoMustNotBeArchived(), repo.EditProtectedTagPost) + }) + m.Group("/hooks/git", func() { m.Get("", repo.GitHooks) m.Combo("/{name}").Get(repo.GitHooksEdit). 
@@ -903,8 +794,8 @@ func RegisterRoutes(m *web.Route) { m.Get("/", repo.Releases) m.Get("/tag/*", repo.SingleRelease) m.Get("/latest", repo.LatestRelease) - m.Get("/attachments/{uuid}", repo.GetAttachment) - }, repo.MustBeNotEmpty, reqRepoReleaseReader, context.RepoRefByType(context.RepoRefTag)) + }, repo.MustBeNotEmpty, reqRepoReleaseReader, context.RepoRefByType(context.RepoRefTag, true)) + m.Get("/releases/attachments/{uuid}", repo.GetAttachment, repo.MustBeNotEmpty, reqRepoReleaseReader) m.Group("/releases", func() { m.Get("/new", repo.NewRelease) m.Post("/new", bindIgnErr(forms.NewReleaseForm{}), repo.NewReleasePost) @@ -1094,25 +985,25 @@ func RegisterRoutes(m *web.Route) { m.Group("/{username}", func() { m.Group("/{reponame}", func() { m.Get("", repo.SetEditorconfigIfExists, repo.Home) - }, goGet, ignSignIn, context.RepoAssignment, context.RepoRef(), context.UnitTypes()) + }, ignSignIn, context.RepoAssignment, context.RepoRef(), context.UnitTypes()) m.Group("/{reponame}", func() { m.Group("/info/lfs", func() { - m.Post("/objects/batch", lfs.BatchHandler) - m.Get("/objects/{oid}/{filename}", lfs.ObjectOidHandler) - m.Any("/objects/{oid}", lfs.ObjectOidHandler) - m.Post("/objects", lfs.PostHandler) - m.Post("/verify", lfs.VerifyHandler) + m.Post("/objects/batch", lfs.CheckAcceptMediaType, lfs.BatchHandler) + m.Put("/objects/{oid}/{size}", lfs.UploadHandler) + m.Get("/objects/{oid}/{filename}", lfs.DownloadHandler) + m.Get("/objects/{oid}", lfs.DownloadHandler) + m.Post("/verify", lfs.CheckAcceptMediaType, lfs.VerifyHandler) m.Group("/locks", func() { m.Get("/", lfs.GetListLockHandler) m.Post("/", lfs.PostLockHandler) m.Post("/verify", lfs.VerifyLockHandler) m.Post("/{lid}/unlock", lfs.UnLockHandler) - }) + }, lfs.CheckAcceptMediaType) m.Any("/*", func(ctx *context.Context) { ctx.NotFound("", nil) }) - }, ignSignInAndCsrf) + }, ignSignInAndCsrf, lfsServerEnabled) m.Group("", func() { m.Post("/git-upload-pack", repo.ServiceUploadPack) @@ -1140,9 +1031,6 @@ func RegisterRoutes(m *web.Route) { }, reqSignIn) if setting.API.EnableSwagger { - m.Get("/swagger.v1.json", routers.SwaggerV1Json) + m.Get("/swagger.v1.json", SwaggerV1Json) } - - // Not found handler. - m.NotFound(web.Wrap(routers.NotFound)) } diff --git a/services/archiver/archiver.go b/services/archiver/archiver.go index 359fc8b627d9..00c028130681 100644 --- a/services/archiver/archiver.go +++ b/services/archiver/archiver.go @@ -6,22 +6,20 @@ package archiver import ( + "errors" + "fmt" "io" - "io/ioutil" "os" - "path" "regexp" "strings" - "sync" - "time" - "code.gitea.io/gitea/modules/base" - "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/storage" ) // ArchiveRequest defines the parameters of an archive request, which notably @@ -30,223 +28,174 @@ import ( // This is entirely opaque to external entities, though, and mostly used as a // handle elsewhere. 
type ArchiveRequest struct { - uri string - repo *git.Repository - refName string - ext string - archivePath string - archiveType git.ArchiveType - archiveComplete bool - commit *git.Commit - cchan chan struct{} + RepoID int64 + refName string + Type git.ArchiveType + CommitID string } -var archiveInProgress []*ArchiveRequest -var archiveMutex sync.Mutex - // SHA1 hashes will only go up to 40 characters, but SHA256 hashes will go all // the way to 64. var shaRegex = regexp.MustCompile(`^[0-9a-f]{4,64}$`) -// These facilitate testing, by allowing the unit tests to control (to some extent) -// the goroutine used for processing the queue. -var archiveQueueMutex *sync.Mutex -var archiveQueueStartCond *sync.Cond -var archiveQueueReleaseCond *sync.Cond - -// GetArchivePath returns the path from which we can serve this archive. -func (aReq *ArchiveRequest) GetArchivePath() string { - return aReq.archivePath -} - -// GetArchiveName returns the name of the caller, based on the ref used by the -// caller to create this request. -func (aReq *ArchiveRequest) GetArchiveName() string { - return aReq.refName + aReq.ext -} - -// IsComplete returns the completion status of this request. -func (aReq *ArchiveRequest) IsComplete() bool { - return aReq.archiveComplete -} - -// WaitForCompletion will wait for this request to complete, with no timeout. -// It returns whether the archive was actually completed, as the channel could -// have also been closed due to an error. -func (aReq *ArchiveRequest) WaitForCompletion(ctx *context.Context) bool { - select { - case <-aReq.cchan: - case <-ctx.Req.Context().Done(): - } - - return aReq.IsComplete() -} - -// TimedWaitForCompletion will wait for this request to complete, with timeout -// happening after the specified Duration. It returns whether the archive is -// now complete and whether we hit the timeout or not. The latter may not be -// useful if the request is complete or we started to shutdown. -func (aReq *ArchiveRequest) TimedWaitForCompletion(ctx *context.Context, dur time.Duration) (bool, bool) { - timeout := false - select { - case <-time.After(dur): - timeout = true - case <-aReq.cchan: - case <-ctx.Req.Context().Done(): - } - - return aReq.IsComplete(), timeout -} - -// The caller must hold the archiveMutex across calls to getArchiveRequest. -func getArchiveRequest(repo *git.Repository, commit *git.Commit, archiveType git.ArchiveType) *ArchiveRequest { - for _, r := range archiveInProgress { - // Need to be referring to the same repository. - if r.repo.Path == repo.Path && r.commit.ID == commit.ID && r.archiveType == archiveType { - return r - } - } - return nil -} - -// DeriveRequestFrom creates an archival request, based on the URI. The +// NewRequest creates an archival request, based on the URI. The // resulting ArchiveRequest is suitable for being passed to ArchiveRepository() // if it's determined that the request still needs to be satisfied. 
-func DeriveRequestFrom(ctx *context.Context, uri string) *ArchiveRequest { - if ctx.Repo == nil || ctx.Repo.GitRepo == nil { - log.Trace("Repo not initialized") - return nil - } +func NewRequest(repoID int64, repo *git.Repository, uri string) (*ArchiveRequest, error) { r := &ArchiveRequest{ - uri: uri, - repo: ctx.Repo.GitRepo, + RepoID: repoID, } + var ext string switch { case strings.HasSuffix(uri, ".zip"): - r.ext = ".zip" - r.archivePath = path.Join(r.repo.Path, "archives/zip") - r.archiveType = git.ZIP + ext = ".zip" + r.Type = git.ZIP case strings.HasSuffix(uri, ".tar.gz"): - r.ext = ".tar.gz" - r.archivePath = path.Join(r.repo.Path, "archives/targz") - r.archiveType = git.TARGZ + ext = ".tar.gz" + r.Type = git.TARGZ default: - log.Trace("Unknown format: %s", uri) - return nil + return nil, fmt.Errorf("Unknown format: %s", uri) } - r.refName = strings.TrimSuffix(r.uri, r.ext) - isDir, err := util.IsDir(r.archivePath) - if err != nil { - ctx.ServerError("Download -> util.IsDir(archivePath)", err) - return nil - } - if !isDir { - if err := os.MkdirAll(r.archivePath, os.ModePerm); err != nil { - ctx.ServerError("Download -> os.MkdirAll(archivePath)", err) - return nil - } - } + r.refName = strings.TrimSuffix(uri, ext) + var err error // Get corresponding commit. - if r.repo.IsBranchExist(r.refName) { - r.commit, err = r.repo.GetBranchCommit(r.refName) + if repo.IsBranchExist(r.refName) { + r.CommitID, err = repo.GetBranchCommitID(r.refName) if err != nil { - ctx.ServerError("GetBranchCommit", err) - return nil + return nil, err } - } else if r.repo.IsTagExist(r.refName) { - r.commit, err = r.repo.GetTagCommit(r.refName) + } else if repo.IsTagExist(r.refName) { + r.CommitID, err = repo.GetTagCommitID(r.refName) if err != nil { - ctx.ServerError("GetTagCommit", err) - return nil + return nil, err } } else if shaRegex.MatchString(r.refName) { - r.commit, err = r.repo.GetCommit(r.refName) - if err != nil { - ctx.NotFound("GetCommit", nil) - return nil + if repo.IsCommitExist(r.refName) { + r.CommitID = r.refName + } else { + return nil, git.ErrNotExist{ + ID: r.refName, + } } } else { - ctx.NotFound("DeriveRequestFrom", nil) - return nil + return nil, fmt.Errorf("Unknown ref type: %s", r.refName) } - archiveMutex.Lock() - defer archiveMutex.Unlock() - if rExisting := getArchiveRequest(r.repo, r.commit, r.archiveType); rExisting != nil { - return rExisting - } + return r, nil +} + +// GetArchiveName returns the name of the caller, based on the ref used by the +// caller to create this request. +func (aReq *ArchiveRequest) GetArchiveName() string { + return strings.ReplaceAll(aReq.refName, "/", "-") + "." + aReq.Type.String() +} - r.archivePath = path.Join(r.archivePath, base.ShortSha(r.commit.ID.String())+r.ext) - r.archiveComplete, err = util.IsFile(r.archivePath) +func doArchive(r *ArchiveRequest) (*models.RepoArchiver, error) { + ctx, commiter, err := models.TxDBContext() if err != nil { - ctx.ServerError("util.IsFile", err) - return nil + return nil, err } - return r -} + defer commiter.Close() -func doArchive(r *ArchiveRequest) { - var ( - err error - tmpArchive *os.File - destArchive *os.File - ) - - // Close the channel to indicate to potential waiters that this request - // has finished. - defer close(r.cchan) - - // It could have happened that we enqueued two archival requests, due to - // race conditions and difficulties in locking. Do one last check that - // the archive we're referring to doesn't already exist.
If it does exist, - // then just mark the request as complete and move on. - isFile, err := util.IsFile(r.archivePath) + archiver, err := models.GetRepoArchiver(ctx, r.RepoID, r.Type, r.CommitID) if err != nil { - log.Error("Unable to check if %s util.IsFile: %v. Will ignore and recreate.", r.archivePath, err) + return nil, err } - if isFile { - r.archiveComplete = true - return + + if archiver != nil { + // FIXME: If another process are generating it, we think it's not ready and just return + // Or we should wait until the archive generated. + if archiver.Status == models.RepoArchiverGenerating { + return nil, nil + } + } else { + archiver = &models.RepoArchiver{ + RepoID: r.RepoID, + Type: r.Type, + CommitID: r.CommitID, + Status: models.RepoArchiverGenerating, + } + if err := models.AddRepoArchiver(ctx, archiver); err != nil { + return nil, err + } } - // Create a temporary file to use while the archive is being built. We - // will then copy it into place (r.archivePath) once it's fully - // constructed. - tmpArchive, err = ioutil.TempFile("", "archive") + rPath, err := archiver.RelativePath() if err != nil { - log.Error("Unable to create a temporary archive file! Error: %v", err) - return + return nil, err + } + + _, err = storage.RepoArchives.Stat(rPath) + if err == nil { + if archiver.Status == models.RepoArchiverGenerating { + archiver.Status = models.RepoArchiverReady + return archiver, models.UpdateRepoArchiverStatus(ctx, archiver) + } + return archiver, nil + } + + if !errors.Is(err, os.ErrNotExist) { + return nil, fmt.Errorf("unable to stat archive: %v", err) } + + rd, w := io.Pipe() defer func() { - tmpArchive.Close() - os.Remove(tmpArchive.Name()) + w.Close() + rd.Close() }() + var done = make(chan error) + repo, err := archiver.LoadRepo() + if err != nil { + return nil, fmt.Errorf("archiver.LoadRepo failed: %v", err) + } - if err = r.commit.CreateArchive(graceful.GetManager().ShutdownContext(), tmpArchive.Name(), git.CreateArchiveOpts{ - Format: r.archiveType, - Prefix: setting.Repository.PrefixArchiveFiles, - }); err != nil { - log.Error("Download -> CreateArchive "+tmpArchive.Name(), err) - return + gitRepo, err := git.OpenRepository(repo.RepoPath()) + if err != nil { + return nil, err } + defer gitRepo.Close() + + go func(done chan error, w *io.PipeWriter, archiver *models.RepoArchiver, gitRepo *git.Repository) { + defer func() { + if r := recover(); r != nil { + done <- fmt.Errorf("%v", r) + } + }() + + err = gitRepo.CreateArchive( + graceful.GetManager().ShutdownContext(), + archiver.Type, + w, + setting.Repository.PrefixArchiveFiles, + archiver.CommitID, + ) + _ = w.CloseWithError(err) + done <- err + }(done, w, archiver, gitRepo) + + // TODO: add lfs data to zip + // TODO: add submodule data to zip - // Now we copy it into place - if destArchive, err = os.Create(r.archivePath); err != nil { - log.Error("Unable to open archive " + r.archivePath) - return + if _, err := storage.RepoArchives.Save(rPath, rd, -1); err != nil { + return nil, fmt.Errorf("unable to write archive: %v", err) } - _, err = io.Copy(destArchive, tmpArchive) - destArchive.Close() + + err = <-done if err != nil { - log.Error("Unable to write archive " + r.archivePath) - return + return nil, err + } + + if archiver.Status == models.RepoArchiverGenerating { + archiver.Status = models.RepoArchiverReady + if err = models.UpdateRepoArchiverStatus(ctx, archiver); err != nil { + return nil, err + } } - // Block any attempt to finalize creating a new request if we're marking - r.archiveComplete = true + 
return archiver, commiter.Commit() } // ArchiveRepository satisfies the ArchiveRequest being passed in. Processing @@ -255,65 +204,46 @@ func doArchive(r *ArchiveRequest) { // anything. In all cases, the caller should be examining the *ArchiveRequest // being returned for completion, as it may be different than the one they passed // in. -func ArchiveRepository(request *ArchiveRequest) *ArchiveRequest { - // We'll return the request that's already been enqueued if it has been - // enqueued, or we'll immediately enqueue it if it has not been enqueued - // and it is not marked complete. - archiveMutex.Lock() - defer archiveMutex.Unlock() - if rExisting := getArchiveRequest(request.repo, request.commit, request.archiveType); rExisting != nil { - return rExisting - } - if request.archiveComplete { - return request - } +func ArchiveRepository(request *ArchiveRequest) (*models.RepoArchiver, error) { + return doArchive(request) +} + +var archiverQueue queue.UniqueQueue - request.cchan = make(chan struct{}) - archiveInProgress = append(archiveInProgress, request) - go func() { - // Wait to start, if we have the Cond for it. This is currently only - // useful for testing, so that the start and release of queued entries - // can be controlled to examine the queue. - if archiveQueueStartCond != nil { - archiveQueueMutex.Lock() - archiveQueueStartCond.Wait() - archiveQueueMutex.Unlock() +// Init initializes the archive queue +func Init() error { + handler := func(data ...queue.Data) { + for _, datum := range data { + archiveReq, ok := datum.(*ArchiveRequest) + if !ok { + log.Error("Unable to process provided datum: %v - not possible to cast to ArchiveRequest", datum) + continue + } + log.Trace("ArchiverData Process: %#v", archiveReq) + if _, err := doArchive(archiveReq); err != nil { + log.Error("Archive %v failed: %v", datum, err) + } } + } - // Drop the mutex while we process the request. This may take a long - // time, and it's not necessary now that we've added the reequest to - // archiveInProgress. - doArchive(request) + archiverQueue = queue.CreateUniqueQueue("repo-archive", handler, new(ArchiveRequest)) + if archiverQueue == nil { + return errors.New("unable to create repo-archive queue") + } - if archiveQueueReleaseCond != nil { - archiveQueueMutex.Lock() - archiveQueueReleaseCond.Wait() - archiveQueueMutex.Unlock() - } + go graceful.GetManager().RunWithShutdownFns(archiverQueue.Run) - // Purge this request from the list. To do so, we'll just take the - // index at which we ended up at and swap the final element into that - // position, then chop off the now-redundant final element. The slice - // may have change in between these two segments and we may have moved, - // so we search for it here. We could perhaps avoid this search - // entirely if len(archiveInProgress) == 1, but we should verify - // correctness. - archiveMutex.Lock() - defer archiveMutex.Unlock() - - idx := -1 - for _idx, req := range archiveInProgress { - if req == request { - idx = _idx - break - } - } - if idx == -1 { - log.Error("ArchiveRepository: Failed to find request for removal.") - return - } - archiveInProgress = append(archiveInProgress[:idx], archiveInProgress[idx+1:]...)
- }() + return nil +} - return request +// StartArchive push the archive request to the queue +func StartArchive(request *ArchiveRequest) error { + has, err := archiverQueue.Has(request) + if err != nil { + return err + } + if has { + return nil + } + return archiverQueue.Push(request) } diff --git a/services/archiver/archiver_test.go b/services/archiver/archiver_test.go index 84bab148182a..3f3f369987b5 100644 --- a/services/archiver/archiver_test.go +++ b/services/archiver/archiver_test.go @@ -6,108 +6,75 @@ package archiver import ( "path/filepath" - "sync" "testing" "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/test" - "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) -var queueMutex sync.Mutex - func TestMain(m *testing.M) { models.MainTest(m, filepath.Join("..", "..")) } func waitForCount(t *testing.T, num int) { - var numQueued int - - // Wait for up to 10 seconds for the queue to be impacted. - timeout := time.Now().Add(10 * time.Second) - for { - numQueued = len(archiveInProgress) - if numQueued == num || time.Now().After(timeout) { - break - } - } - - assert.Equal(t, num, len(archiveInProgress)) -} - -func releaseOneEntry(t *testing.T, inFlight []*ArchiveRequest) { - var nowQueued, numQueued int - - numQueued = len(archiveInProgress) - - // Release one, then wait up to 10 seconds for it to complete. - queueMutex.Lock() - archiveQueueReleaseCond.Signal() - queueMutex.Unlock() - timeout := time.Now().Add(10 * time.Second) - for { - nowQueued = len(archiveInProgress) - if nowQueued != numQueued || time.Now().After(timeout) { - break - } - } - - // Make sure we didn't just timeout. - assert.NotEqual(t, numQueued, nowQueued) - // Also make sure that we released only one. - assert.Equal(t, numQueued-1, nowQueued) } func TestArchive_Basic(t *testing.T) { assert.NoError(t, models.PrepareTestDatabase()) - archiveQueueMutex = &queueMutex - archiveQueueStartCond = sync.NewCond(&queueMutex) - archiveQueueReleaseCond = sync.NewCond(&queueMutex) - defer func() { - archiveQueueMutex = nil - archiveQueueStartCond = nil - archiveQueueReleaseCond = nil - }() - ctx := test.MockContext(t, "user27/repo49") firstCommit, secondCommit := "51f84af23134", "aacbdfe9e1c4" - bogusReq := DeriveRequestFrom(ctx, firstCommit+".zip") - assert.Nil(t, bogusReq) - test.LoadRepo(t, ctx, 49) - bogusReq = DeriveRequestFrom(ctx, firstCommit+".zip") - assert.Nil(t, bogusReq) - test.LoadGitRepo(t, ctx) defer ctx.Repo.GitRepo.Close() + bogusReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") + assert.NoError(t, err) + assert.NotNil(t, bogusReq) + assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName()) + // Check a series of bogus requests. // Step 1, valid commit with a bad extension. - bogusReq = DeriveRequestFrom(ctx, firstCommit+".dilbert") + bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".dilbert") + assert.Error(t, err) assert.Nil(t, bogusReq) // Step 2, missing commit. - bogusReq = DeriveRequestFrom(ctx, "dbffff.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "dbffff.zip") + assert.Error(t, err) assert.Nil(t, bogusReq) // Step 3, doesn't look like branch/tag/commit. 
- bogusReq = DeriveRequestFrom(ctx, "db.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "db.zip") + assert.Error(t, err) assert.Nil(t, bogusReq) + bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master.zip") + assert.NoError(t, err) + assert.NotNil(t, bogusReq) + assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName()) + + bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive.zip") + assert.NoError(t, err) + assert.NotNil(t, bogusReq) + assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName()) + // Now two valid requests, firstCommit with valid extensions. - zipReq := DeriveRequestFrom(ctx, firstCommit+".zip") + zipReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") + assert.NoError(t, err) assert.NotNil(t, zipReq) - tgzReq := DeriveRequestFrom(ctx, firstCommit+".tar.gz") + tgzReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".tar.gz") + assert.NoError(t, err) assert.NotNil(t, tgzReq) - secondReq := DeriveRequestFrom(ctx, secondCommit+".zip") + secondReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".zip") + assert.NoError(t, err) assert.NotNil(t, secondReq) inFlight := make([]*ArchiveRequest, 3) @@ -128,41 +95,9 @@ func TestArchive_Basic(t *testing.T) { // Sleep two seconds to make sure the queue doesn't change. time.Sleep(2 * time.Second) - assert.Equal(t, 3, len(archiveInProgress)) - - // Release them all, they'll then stall at the archiveQueueReleaseCond while - // we examine the queue state. - queueMutex.Lock() - archiveQueueStartCond.Broadcast() - queueMutex.Unlock() - - // Iterate through all of the in-flight requests and wait for their - // completion. - for _, req := range inFlight { - req.WaitForCompletion(ctx) - } - - for _, req := range inFlight { - assert.True(t, req.IsComplete()) - exist, err := util.IsExist(req.GetArchivePath()) - assert.NoError(t, err) - assert.True(t, exist) - } - - arbitraryReq := inFlight[0] - // Reopen the channel so we don't double-close, mark it incomplete. We're - // going to run it back through the archiver, and it should get marked - // complete again. - arbitraryReq.cchan = make(chan struct{}) - arbitraryReq.archiveComplete = false - doArchive(arbitraryReq) - assert.True(t, arbitraryReq.IsComplete()) - - // Queues should not have drained yet, because we haven't released them. - // Do so now. - assert.Equal(t, 3, len(archiveInProgress)) - - zipReq2 := DeriveRequestFrom(ctx, firstCommit+".zip") + + zipReq2, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") + assert.NoError(t, err) // This zipReq should match what's sitting in the queue, as we haven't // let it release yet. From the consumer's point of view, this looks like // a long-running archive task. @@ -173,46 +108,22 @@ func TestArchive_Basic(t *testing.T) { // predecessor has cleared out of the queue. ArchiveRepository(zipReq2) - // Make sure the queue hasn't grown any. - assert.Equal(t, 3, len(archiveInProgress)) - - // Make sure the queue drains properly - releaseOneEntry(t, inFlight) - assert.Equal(t, 2, len(archiveInProgress)) - releaseOneEntry(t, inFlight) - assert.Equal(t, 1, len(archiveInProgress)) - releaseOneEntry(t, inFlight) - assert.Equal(t, 0, len(archiveInProgress)) - // Now we'll submit a request and TimedWaitForCompletion twice, before and // after we release it. We should trigger both the timeout and non-timeout // cases. 
- var completed, timedout bool - timedReq := DeriveRequestFrom(ctx, secondCommit+".tar.gz") + timedReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".tar.gz") + assert.NoError(t, err) assert.NotNil(t, timedReq) ArchiveRepository(timedReq) - // Guaranteed to timeout; we haven't signalled the request to start.. - completed, timedout = timedReq.TimedWaitForCompletion(ctx, 2*time.Second) - assert.Equal(t, false, completed) - assert.Equal(t, true, timedout) - - queueMutex.Lock() - archiveQueueStartCond.Broadcast() - queueMutex.Unlock() - - // Shouldn't timeout, we've now signalled it and it's a small request. - completed, timedout = timedReq.TimedWaitForCompletion(ctx, 15*time.Second) - assert.Equal(t, true, completed) - assert.Equal(t, false, timedout) - - zipReq2 = DeriveRequestFrom(ctx, firstCommit+".zip") + zipReq2, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") + assert.NoError(t, err) // Now, we're guaranteed to have released the original zipReq from the queue. // Ensure that we don't get handed back the released entry somehow, but they // should remain functionally equivalent in all fields. The exception here // is zipReq.cchan, which will be non-nil because it's a completed request. // It's fine to go ahead and set it to nil now. - zipReq.cchan = nil + assert.Equal(t, zipReq, zipReq2) assert.False(t, zipReq == zipReq2) diff --git a/modules/auth/sso/sso.go b/services/auth/auth.go similarity index 72% rename from modules/auth/sso/sso.go rename to services/auth/auth.go index e670f1a8a719..5492a8b74ede 100644 --- a/modules/auth/sso/sso.go +++ b/services/auth/auth.go @@ -3,20 +3,22 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "fmt" "net/http" "reflect" + "regexp" "strings" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web/middleware" ) -// ssoMethods contains the list of SSO authentication plugins in the order they are expected to be +// authMethods contains the list of authentication plugins in the order they are expected to be // executed. // // The OAuth2 plugin is expected to be executed first, as it must ignore the user id stored @@ -25,11 +27,10 @@ import ( // // The Session plugin is expected to be executed second, in order to skip authentication // for users that have already signed in. 
-var ssoMethods = []SingleSignOn{ +var authMethods = []Auth{ &OAuth2{}, - &Session{}, - &ReverseProxy{}, &Basic{}, + &Session{}, } // The purpose of the following three function variables is to let the linter know that @@ -38,59 +39,40 @@ var ( _ = handleSignIn ) -// Methods returns the instances of all registered SSO methods -func Methods() []SingleSignOn { - return ssoMethods +// Methods returns the instances of all registered methods +func Methods() []Auth { + return authMethods } -// Register adds the specified instance to the list of available SSO methods -func Register(method SingleSignOn) { - ssoMethods = append(ssoMethods, method) +// Register adds the specified instance to the list of available methods +func Register(method Auth) { + authMethods = append(authMethods, method) } -// Init should be called exactly once when the application starts to allow SSO plugins +// Init should be called exactly once when the application starts to allow plugins // to allocate necessary resources func Init() { + if setting.Service.EnableReverseProxyAuth { + Register(&ReverseProxy{}) + } + specialInit() for _, method := range Methods() { err := method.Init() if err != nil { - log.Error("Could not initialize '%s' SSO method, error: %s", reflect.TypeOf(method).String(), err) + log.Error("Could not initialize '%s' auth method, error: %s", reflect.TypeOf(method).String(), err) } } } -// Free should be called exactly once when the application is terminating to allow SSO plugins +// Free should be called exactly once when the application is terminating to allow Auth plugins // to release necessary resources func Free() { for _, method := range Methods() { err := method.Free() if err != nil { - log.Error("Could not free '%s' SSO method, error: %s", reflect.TypeOf(method).String(), err) - } - } -} - -// SessionUser returns the user object corresponding to the "uid" session variable. -func SessionUser(sess SessionStore) *models.User { - // Get user ID - uid := sess.Get("uid") - if uid == nil { - return nil - } - id, ok := uid.(int64) - if !ok { - return nil - } - - // Get user object - user, err := models.GetUserByID(id) - if err != nil { - if !models.IsErrUserNotExist(err) { - log.Error("GetUserById: %v", err) + log.Error("Could not free '%s' auth method, error: %s", reflect.TypeOf(method).String(), err) } - return nil } - return user } // isAttachmentDownload check if request is a file download (GET) with URL to an attachment @@ -98,6 +80,19 @@ func isAttachmentDownload(req *http.Request) bool { return strings.HasPrefix(req.URL.Path, "/attachments/") && req.Method == "GET" } +var gitRawPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|raw/)`) +var lfsPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/info/lfs/`) + +func isGitRawOrLFSPath(req *http.Request) bool { + if gitRawPathRe.MatchString(req.URL.Path) { + return true + } + if setting.LFS.StartServer { + return lfsPathRe.MatchString(req.URL.Path) + } + return false +} + // handleSignIn clears existing session variables and stores new ones for the specified user object func handleSignIn(resp http.ResponseWriter, req *http.Request, sess SessionStore, user *models.User) { _ = sess.Delete("openid_verified_uri") diff --git a/services/auth/auth_test.go b/services/auth/auth_test.go new file mode 100644 index 000000000000..f6b43835f45f --- /dev/null +++ b/services/auth/auth_test.go @@ -0,0 +1,128 @@ +// Copyright 2014 The Gogs Authors. 
All rights reserved. +// Copyright 2019 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package auth + +import ( + "net/http" + "testing" + + "code.gitea.io/gitea/modules/setting" +) + +func Test_isGitRawOrLFSPath(t *testing.T) { + + tests := []struct { + path string + + want bool + }{ + { + "/owner/repo/git-upload-pack", + true, + }, + { + "/owner/repo/git-receive-pack", + true, + }, + { + "/owner/repo/info/refs", + true, + }, + { + "/owner/repo/HEAD", + true, + }, + { + "/owner/repo/objects/info/alternates", + true, + }, + { + "/owner/repo/objects/info/http-alternates", + true, + }, + { + "/owner/repo/objects/info/packs", + true, + }, + { + "/owner/repo/objects/info/blahahsdhsdkla", + true, + }, + { + "/owner/repo/objects/01/23456789abcdef0123456789abcdef01234567", + true, + }, + { + "/owner/repo/objects/pack/pack-123456789012345678921234567893124567894.pack", + true, + }, + { + "/owner/repo/objects/pack/pack-0123456789abcdef0123456789abcdef0123456.idx", + true, + }, + { + "/owner/repo/raw/branch/foo/fanaso", + true, + }, + { + "/owner/repo/stars", + false, + }, + { + "/notowner", + false, + }, + { + "/owner/repo", + false, + }, + { + "/owner/repo/commit/123456789012345678921234567893124567894", + false, + }, + } + lfsTests := []string{ + "/owner/repo/info/lfs/", + "/owner/repo/info/lfs/objects/batch", + "/owner/repo/info/lfs/objects/oid/filename", + "/owner/repo/info/lfs/objects/oid", + "/owner/repo/info/lfs/objects", + "/owner/repo/info/lfs/verify", + "/owner/repo/info/lfs/locks", + "/owner/repo/info/lfs/locks/verify", + "/owner/repo/info/lfs/locks/123/unlock", + } + + origLFSStartServer := setting.LFS.StartServer + + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + req, _ := http.NewRequest("POST", "http://localhost"+tt.path, nil) + setting.LFS.StartServer = false + if got := isGitRawOrLFSPath(req); got != tt.want { + t.Errorf("isGitOrLFSPath() = %v, want %v", got, tt.want) + } + setting.LFS.StartServer = true + if got := isGitRawOrLFSPath(req); got != tt.want { + t.Errorf("isGitOrLFSPath() = %v, want %v", got, tt.want) + } + }) + } + for _, tt := range lfsTests { + t.Run(tt, func(t *testing.T) { + req, _ := http.NewRequest("POST", tt, nil) + setting.LFS.StartServer = false + if got := isGitRawOrLFSPath(req); got != setting.LFS.StartServer { + t.Errorf("isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, gitRawPathRe.MatchString(tt)) + } + setting.LFS.StartServer = true + if got := isGitRawOrLFSPath(req); got != setting.LFS.StartServer { + t.Errorf("isGitOrLFSPath(%q) = %v, want %v", tt, got, setting.LFS.StartServer) + } + }) + } + setting.LFS.StartServer = origLFSStartServer +} diff --git a/modules/auth/sso/basic.go b/services/auth/basic.go similarity index 61% rename from modules/auth/sso/basic.go rename to services/auth/basic.go index d2d25c6cece6..0bce4f1d067a 100644 --- a/modules/auth/sso/basic.go +++ b/services/auth/basic.go @@ -3,7 +3,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "net/http" @@ -14,19 +14,25 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/web/middleware" ) // Ensure the struct implements the interface. 
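As a quick reference alongside the table-driven test above, the two patterns can be probed directly; a small sketch assuming it sits in the same package as gitRawPathRe and lfsPathRe (the sample paths are illustrative):

    gitRawPathRe.MatchString("/owner/repo/info/refs")           // true  (info/refs alternative)
    gitRawPathRe.MatchString("/owner/repo/objects/01/23abcd")   // true  (objects/ prefix)
    gitRawPathRe.MatchString("/owner/repo/commit/abcdef")       // false (no alternative matches)
    lfsPathRe.MatchString("/owner/repo/info/lfs/objects/batch") // true  (only honoured when LFS.StartServer is set)
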
var ( - _ SingleSignOn = &Basic{} + _ Auth = &Basic{} ) -// Basic implements the SingleSignOn interface and authenticates requests (API requests +// Basic implements the Auth interface and authenticates requests (API requests // only) by looking for Basic authentication data or "x-oauth-basic" token in the "Authorization" // header. type Basic struct { } +// Name represents the name of auth method +func (b *Basic) Name() string { + return "basic" +} + // Init does nothing as the Basic implementation does not need to allocate any resources func (b *Basic) Init() error { return nil @@ -37,28 +43,26 @@ func (b *Basic) Free() error { return nil } -// IsEnabled returns true as this plugin is enabled by default and its not possible to disable -// it from settings. -func (b *Basic) IsEnabled() bool { - return setting.Service.EnableBasicAuth -} - -// VerifyAuthData extracts and validates Basic data (username and password/token) from the +// Verify extracts and validates Basic data (username and password/token) from the // "Authorization" header of the request and returns the corresponding user object for that // name/token on successful validation. // Returns nil if header is empty or validation fails. -func (b *Basic) VerifyAuthData(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { +func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { + // Basic authentication should only fire on API, Download or on Git or LFSPaths + if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawOrLFSPath(req) { + return nil + } + baHead := req.Header.Get("Authorization") if len(baHead) == 0 { return nil } - auths := strings.Fields(baHead) + auths := strings.SplitN(baHead, " ", 2) if len(auths) != 2 || (auths[0] != "Basic" && auths[0] != "basic") { return nil } - var u *models.User uname, passwd, _ := base.BasicAuthDecode(auths[1]) // Check if username or password is a token @@ -66,24 +70,31 @@ func (b *Basic) VerifyAuthData(req *http.Request, w http.ResponseWriter, store D // Assume username is token authToken := uname if !isUsernameToken { + log.Trace("Basic Authorization: Attempting login for: %s", uname) // Assume password is token authToken = passwd + } else { + log.Trace("Basic Authorization: Attempting login with username as token") } uid := CheckOAuthAccessToken(authToken) if uid != 0 { - var err error - store.GetData()["IsApiToken"] = true + log.Trace("Basic Authorization: Valid OAuthAccessToken for user[%d]", uid) - u, err = models.GetUserByID(uid) + u, err := models.GetUserByID(uid) if err != nil { log.Error("GetUserByID: %v", err) return nil } + + store.GetData()["IsApiToken"] = true + return u } + token, err := models.GetAccessTokenBySHA(authToken) if err == nil { - u, err = models.GetUserByID(token.UID) + log.Trace("Basic Authorization: Valid AccessToken for user[%d]", uid) + u, err := models.GetUserByID(token.UID) if err != nil { log.Error("GetUserByID: %v", err) return nil @@ -93,21 +104,27 @@ func (b *Basic) VerifyAuthData(req *http.Request, w http.ResponseWriter, store D if err = models.UpdateAccessToken(token); err != nil { log.Error("UpdateAccessToken: %v", err) } + + store.GetData()["IsApiToken"] = true + return u } else if !models.IsErrAccessTokenNotExist(err) && !models.IsErrAccessTokenEmpty(err) { log.Error("GetAccessTokenBySha: %v", err) } - if u == nil { - u, err = models.UserSignIn(uname, passwd) - if err != nil { - if !models.IsErrUserNotExist(err) { - 
log.Error("UserSignIn: %v", err) - } - return nil + if !setting.Service.EnableBasicAuth { + return nil + } + + log.Trace("Basic Authorization: Attempting SignIn for %s", uname) + u, err := models.UserSignIn(uname, passwd) + if err != nil { + if !models.IsErrUserNotExist(err) { + log.Error("UserSignIn: %v", err) } - } else { - store.GetData()["IsApiToken"] = true + return nil } + log.Trace("Basic Authorization: Logged in user %-v", u) + return u } diff --git a/services/auth/group.go b/services/auth/group.go new file mode 100644 index 000000000000..b61949de7dea --- /dev/null +++ b/services/auth/group.go @@ -0,0 +1,73 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package auth + +import ( + "net/http" + + "code.gitea.io/gitea/models" +) + +// Ensure the struct implements the interface. +var ( + _ Auth = &Group{} +) + +// Group implements the Auth interface with serval Auth. +type Group struct { + methods []Auth +} + +// NewGroup creates a new auth group +func NewGroup(methods ...Auth) *Group { + return &Group{ + methods: methods, + } +} + +// Name represents the name of auth method +func (b *Group) Name() string { + return "group" +} + +// Init does nothing as the Basic implementation does not need to allocate any resources +func (b *Group) Init() error { + for _, m := range b.methods { + if err := m.Init(); err != nil { + return err + } + } + return nil +} + +// Free does nothing as the Basic implementation does not have to release any resources +func (b *Group) Free() error { + for _, m := range b.methods { + if err := m.Free(); err != nil { + return err + } + } + return nil +} + +// Verify extracts and validates +func (b *Group) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { + if !models.HasEngine { + return nil + } + + // Try to sign in with each of the enabled plugins + for _, ssoMethod := range b.methods { + user := ssoMethod.Verify(req, w, store, sess) + if user != nil { + if store.GetData()["AuthedMethod"] == nil { + store.GetData()["AuthedMethod"] = ssoMethod.Name() + } + return user + } + } + + return nil +} diff --git a/modules/auth/sso/interface.go b/services/auth/interface.go similarity index 71% rename from modules/auth/sso/interface.go rename to services/auth/interface.go index 9b1472f2b37f..a305bdfc226c 100644 --- a/modules/auth/sso/interface.go +++ b/services/auth/interface.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "net/http" @@ -18,8 +18,10 @@ type DataStore middleware.DataStore // SessionStore represents a session store type SessionStore session.Store -// SingleSignOn represents a SSO authentication method (plugin) for HTTP requests. -type SingleSignOn interface { +// Auth represents an authentication method (plugin) for HTTP requests. +type Auth interface { + Name() string + // Init should be called exactly once before using any of the other methods, // in order to allow the plugin to allocate necessary resources Init() error @@ -28,13 +30,10 @@ type SingleSignOn interface { // give chance to the plugin to free any allocated resources Free() error - // IsEnabled checks if the current SSO method has been enabled in settings. - IsEnabled() bool - - // VerifyAuthData tries to verify the SSO authentication data contained in the request. 
+ // Verify tries to verify the authentication data contained in the request. // If verification is successful returns either an existing user object (with id > 0) // or a new user object (with id = 0) populated with the information that was found // in the authentication data (username or email). // Returns nil if verification fails. - VerifyAuthData(http *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User + Verify(http *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User } diff --git a/modules/auth/sso/oauth2.go b/services/auth/oauth2.go similarity index 84% rename from modules/auth/sso/oauth2.go rename to services/auth/oauth2.go index fcd6845b38cc..c6b98c144f0e 100644 --- a/modules/auth/sso/oauth2.go +++ b/services/auth/oauth2.go @@ -3,7 +3,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "net/http" @@ -18,7 +18,7 @@ import ( // Ensure the struct implements the interface. var ( - _ SingleSignOn = &OAuth2{} + _ Auth = &OAuth2{} ) // CheckOAuthAccessToken returns uid of user from oauth token @@ -45,7 +45,7 @@ func CheckOAuthAccessToken(accessToken string) int64 { return grant.UserID } -// OAuth2 implements the SingleSignOn interface and authenticates requests +// OAuth2 implements the Auth interface and authenticates requests // (API requests only) by looking for an OAuth token in query parameters or the // "Authorization" header. type OAuth2 struct { @@ -56,6 +56,11 @@ func (o *OAuth2) Init() error { return nil } +// Name represents the name of auth method +func (o *OAuth2) Name() string { + return "oauth2" +} + // Free does nothing as the OAuth2 implementation does not have to release any resources func (o *OAuth2) Free() error { return nil @@ -107,22 +112,16 @@ func (o *OAuth2) userIDFromToken(req *http.Request, store DataStore) int64 { return t.UID } -// IsEnabled returns true as this plugin is enabled by default and its not possible -// to disable it from settings. -func (o *OAuth2) IsEnabled() bool { - return true -} - -// VerifyAuthData extracts the user ID from the OAuth token in the query parameters +// Verify extracts the user ID from the OAuth token in the query parameters // or the "Authorization" header and returns the corresponding user object for that ID. // If verification is successful returns an existing user object. // Returns nil if verification fails. 
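The renamed interface above is the full contract a new plugin has to meet. A hedged skeleton of a custom method (the HeaderToken type and X-Example-Token header are invented for illustration; it would live in the services/auth package and be added via Register):

    // HeaderToken is an illustrative Auth method that trusts a custom header.
    type HeaderToken struct{}

    func (h *HeaderToken) Name() string { return "header_token" }
    func (h *HeaderToken) Init() error  { return nil }
    func (h *HeaderToken) Free() error  { return nil }

    func (h *HeaderToken) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User {
        token := req.Header.Get("X-Example-Token")
        if token == "" {
            return nil
        }
        // Resolve token to a user here; returning nil lets the next method run.
        return nil
    }
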
-func (o *OAuth2) VerifyAuthData(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { +func (o *OAuth2) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { if !models.HasEngine { return nil } - if middleware.IsInternalPath(req) || !middleware.IsAPIPath(req) && !isAttachmentDownload(req) { + if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) { return nil } @@ -130,6 +129,7 @@ func (o *OAuth2) VerifyAuthData(req *http.Request, w http.ResponseWriter, store if id <= 0 { return nil } + log.Trace("OAuth2 Authorization: Found token for user[%d]", id) user, err := models.GetUserByID(id) if err != nil { @@ -139,5 +139,6 @@ func (o *OAuth2) VerifyAuthData(req *http.Request, w http.ResponseWriter, store return nil } + log.Trace("OAuth2 Authorization: Logged in user %-v", user) return user } diff --git a/services/auth/placeholder.go b/services/auth/placeholder.go new file mode 100644 index 000000000000..50e3061885f8 --- /dev/null +++ b/services/auth/placeholder.go @@ -0,0 +1,9 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +// +build !windows + +package auth + +func specialInit() {} diff --git a/modules/auth/sso/reverseproxy.go b/services/auth/reverseproxy.go similarity index 71% rename from modules/auth/sso/reverseproxy.go rename to services/auth/reverseproxy.go index ca9450e71429..f958d28c9a66 100644 --- a/modules/auth/sso/reverseproxy.go +++ b/services/auth/reverseproxy.go @@ -3,7 +3,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "net/http" @@ -12,16 +12,17 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web/middleware" gouuid "github.com/google/uuid" ) // Ensure the struct implements the interface. var ( - _ SingleSignOn = &ReverseProxy{} + _ Auth = &ReverseProxy{} ) -// ReverseProxy implements the SingleSignOn interface, but actually relies on +// ReverseProxy implements the Auth interface, but actually relies on // a reverse proxy for authentication of users. // On successful authentication the proxy is expected to populate the username in the // "setting.ReverseProxyAuthUser" header. Optionally it can also populate the email of the @@ -38,6 +39,11 @@ func (r *ReverseProxy) getUserName(req *http.Request) string { return webAuthUser } +// Name represents the name of auth method +func (r *ReverseProxy) Name() string { + return "reverse_proxy" +} + // Init does nothing as the ReverseProxy implementation does not need initialization func (r *ReverseProxy) Init() error { return nil @@ -48,33 +54,38 @@ func (r *ReverseProxy) Free() error { return nil } -// IsEnabled checks if EnableReverseProxyAuth setting is true -func (r *ReverseProxy) IsEnabled() bool { - return setting.Service.EnableReverseProxyAuth -} - -// VerifyAuthData extracts the username from the "setting.ReverseProxyAuthUser" header +// Verify extracts the username from the "setting.ReverseProxyAuthUser" header // of the request and returns the corresponding user object for that name. // Verification of header data is not performed as it should have already been done by // the revese proxy. 
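placeholder.go pairs with sspi_windows.go through build tags so that Init can call specialInit unconditionally on every platform. A sketch of the same pattern applied to a hypothetical hook of your own (file and function names here are illustrative):

    // hook_stub.go
    // +build !windows

    package auth

    func platformHook() {} // no-op on non-Windows builds

    // hook_windows.go
    // +build windows

    package auth

    func platformHook() {
        // register Windows-only auth methods here, mirroring specialInit
    }
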
// If a username is available in the "setting.ReverseProxyAuthUser" header an existing // user object is returned (populated with username or email found in header). // Returns nil if header is empty. -func (r *ReverseProxy) VerifyAuthData(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { +func (r *ReverseProxy) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { username := r.getUserName(req) if len(username) == 0 { return nil } + log.Trace("ReverseProxy Authorization: Found username: %s", username) user, err := models.GetUserByName(username) if err != nil { - if models.IsErrUserNotExist(err) && r.isAutoRegisterAllowed() { - return r.newUser(req) + if !models.IsErrUserNotExist(err) || !r.isAutoRegisterAllowed() { + log.Error("GetUserByName: %v", err) + return nil } - log.Error("GetUserByName: %v", err) - return nil + user = r.newUser(req) } + // Make sure requests to API paths, attachment downloads, git and LFS do not create a new session + if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawOrLFSPath(req) { + if sess != nil && (sess.Get("uid") == nil || sess.Get("uid").(int64) != user.ID) { + handleSignIn(w, req, sess, user) + } + } + store.GetData()["IsReverseProxy"] = true + + log.Trace("ReverseProxy Authorization: Logged in user %-v", user) return user } @@ -102,7 +113,6 @@ func (r *ReverseProxy) newUser(req *http.Request) *models.User { user := &models.User{ Name: username, Email: email, - Passwd: username, IsActive: true, } if err := models.CreateUser(user); err != nil { @@ -110,5 +120,6 @@ func (r *ReverseProxy) newUser(req *http.Request) *models.User { log.Error("CreateUser: %v", err) return nil } + return user } diff --git a/services/auth/session.go b/services/auth/session.go new file mode 100644 index 000000000000..9f08f4336300 --- /dev/null +++ b/services/auth/session.go @@ -0,0 +1,75 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package auth + +import ( + "net/http" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" +) + +// Ensure the struct implements the interface. +var ( + _ Auth = &Session{} +) + +// Session checks if there is a user uid stored in the session and returns the user +// object for that uid. +type Session struct { +} + +// Init does nothing as the Session implementation does not need to allocate any resources +func (s *Session) Init() error { + return nil +} + +// Name represents the name of auth method +func (s *Session) Name() string { + return "session" +} + +// Free does nothing as the Session implementation does not have to release any resources +func (s *Session) Free() error { + return nil +} + +// Verify checks if there is a user uid stored in the session and returns the user +// object for that uid. +// Returns nil if there is no user uid stored in the session. +func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { + user := SessionUser(sess) + if user != nil { + return user + } + return nil +} + +// SessionUser returns the user object corresponding to the "uid" session variable. 
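Since the header name comes from configuration, the lookup can be exercised directly inside a test; a sketch assuming it runs in the services/auth package, with setting.ReverseProxyAuthUser holding the header name as the doc comment above describes:

    req := httptest.NewRequest("GET", "/owner/repo", nil)
    req.Header.Set(setting.ReverseProxyAuthUser, "jane")
    if got := (&ReverseProxy{}).getUserName(req); got != "jane" {
        t.Errorf("getUserName() = %q, want %q", got, "jane")
    }
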
+func SessionUser(sess SessionStore) *models.User { + // Get user ID + uid := sess.Get("uid") + if uid == nil { + return nil + } + log.Trace("Session Authorization: Found user[%d]", uid) + + id, ok := uid.(int64) + if !ok { + return nil + } + + // Get user object + user, err := models.GetUserByID(id) + if err != nil { + if !models.IsErrUserNotExist(err) { + log.Error("GetUserById: %v", err) + } + return nil + } + + log.Trace("Session Authorization: Logged in user %-v", user) + return user +} diff --git a/modules/auth/sso/sspi_windows.go b/services/auth/sspi_windows.go similarity index 91% rename from modules/auth/sso/sspi_windows.go rename to services/auth/sspi_windows.go index 46f7ad9d97a0..bb0291d2c9ba 100644 --- a/modules/auth/sso/sspi_windows.go +++ b/services/auth/sspi_windows.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package sso +package auth import ( "errors" @@ -32,7 +32,7 @@ var ( sspiAuth *websspi.Authenticator // Ensure the struct implements the interface. - _ SingleSignOn = &SSPI{} + _ Auth = &SSPI{} ) // SSPI implements the SingleSignOn interface and authenticates requests @@ -62,21 +62,21 @@ func (s *SSPI) Init() error { return nil } +// Name represents the name of auth method +func (s *SSPI) Name() string { + return "sspi" +} + // Free releases resources used by the global websspi.Authenticator object func (s *SSPI) Free() error { return sspiAuth.Free() } -// IsEnabled checks if there is an active SSPI authentication source -func (s *SSPI) IsEnabled() bool { - return models.IsSSPIEnabled() -} - -// VerifyAuthData uses SSPI (Windows implementation of SPNEGO) to authenticate the request. -// If authentication is successful, returs the corresponding user object. +// Verify uses SSPI (Windows implementation of SPNEGO) to authenticate the request. +// If authentication is successful, returns the corresponding user object. // If negotiation should continue or authentication fails, immediately returns a 401 HTTP // response code, as required by the SPNEGO protocol. -func (s *SSPI) VerifyAuthData(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { +func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User { if !s.shouldAuthenticate(req) { return nil } @@ -87,6 +87,7 @@ func (s *SSPI) VerifyAuthData(req *http.Request, w http.ResponseWriter, store Da return nil } + log.Trace("SSPI Authorization: Attempting to authenticate") userInfo, outToken, err := sspiAuth.Authenticate(req, w) if err != nil { log.Warn("Authentication failed with error: %v\n", err) @@ -140,6 +141,7 @@ func (s *SSPI) VerifyAuthData(req *http.Request, w http.ResponseWriter, store Da handleSignIn(w, req, sess, user) } + log.Trace("SSPI Authorization: Logged in user %-v", user) return user } @@ -167,8 +169,6 @@ func (s *SSPI) shouldAuthenticate(req *http.Request) (shouldAuth bool) { } else if req.FormValue("auth_with_sspi") == "1" { shouldAuth = true } - } else if middleware.IsInternalPath(req) { - shouldAuth = false } else if middleware.IsAPIPath(req) || isAttachmentDownload(req) { shouldAuth = true } @@ -235,10 +235,12 @@ func sanitizeUsername(username string, cfg *models.SSPIConfig) string { return username } -// init registers the SSPI auth method as the last method in the list. +// specialInit registers the SSPI auth method as the last method in the list. 
// The SSPI plugin is expected to be executed last, as it returns 401 status code if negotiation // fails (or if negotiation should continue), which would prevent other authentication methods // to execute at all. -func init() { - Register(&SSPI{}) +func specialInit() { + if models.IsSSPIEnabled() { + Register(&SSPI{}) + } } diff --git a/services/forms/admin.go b/services/forms/admin.go index 2e6bbaf17201..5abef0550e39 100644 --- a/services/forms/admin.go +++ b/services/forms/admin.go @@ -8,6 +8,7 @@ import ( "net/http" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web/middleware" "gitea.com/go-chi/binding" @@ -22,6 +23,7 @@ type AdminCreateUserForm struct { Password string `binding:"MaxSize(255)"` SendNotify bool MustChangePassword bool + Visibility structs.VisibleType } // Validate validates form fields @@ -49,6 +51,7 @@ type AdminEditUserForm struct { AllowCreateOrganization bool ProhibitLogin bool Reset2FA bool `form:"reset_2fa"` + Visibility structs.VisibleType } // Validate validates form fields diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go index 7cf6b9fcd5ed..30621cadffc2 100644 --- a/services/forms/auth_form.go +++ b/services/forms/auth_form.go @@ -51,6 +51,7 @@ type AuthenticationForm struct { TLS bool SkipVerify bool PAMServiceName string + PAMEmailDomain string Oauth2Provider string Oauth2Key string Oauth2Secret string diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 55d1f6e3bc38..7c79c4dc21eb 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -113,18 +113,23 @@ func ParseRemoteAddr(remoteAddr, authUsername, authPassword string) (string, err // RepoSettingForm form for changing repository settings type RepoSettingForm struct { - RepoName string `binding:"Required;AlphaDashDot;MaxSize(100)"` - Description string `binding:"MaxSize(255)"` - Website string `binding:"ValidUrl;MaxSize(255)"` - Interval string - MirrorAddress string - MirrorUsername string - MirrorPassword string - LFS bool `form:"mirror_lfs"` - LFSEndpoint string `form:"mirror_lfs_endpoint"` - Private bool - Template bool - EnablePrune bool + RepoName string `binding:"Required;AlphaDashDot;MaxSize(100)"` + Description string `binding:"MaxSize(255)"` + Website string `binding:"ValidUrl;MaxSize(255)"` + Interval string + MirrorAddress string + MirrorUsername string + MirrorPassword string + LFS bool `form:"mirror_lfs"` + LFSEndpoint string `form:"mirror_lfs_endpoint"` + PushMirrorID string + PushMirrorAddress string + PushMirrorUsername string + PushMirrorPassword string + PushMirrorInterval string + Private bool + Template bool + EnablePrune bool // Advanced settings EnableWiki bool @@ -146,6 +151,7 @@ type RepoSettingForm struct { PullsAllowManualMerge bool PullsDefaultMergeStyle string EnableAutodetectManualMerge bool + DefaultDeleteBranchAfterMerge bool EnableTimetracker bool AllowOnlyContributorsToTrackTime bool EnableIssueDependencies bool @@ -546,11 +552,12 @@ func (f *InitializeLabelsForm) Validate(req *http.Request, errs binding.Errors) type MergePullRequestForm struct { // required: true // enum: merge,rebase,rebase-merge,squash,manually-merged - Do string `binding:"Required;In(merge,rebase,rebase-merge,squash,manually-merged)"` - MergeTitleField string - MergeMessageField string - MergeCommitID string // only used for manually-merged - ForceMerge *bool `json:"force_merge,omitempty"` + Do string `binding:"Required;In(merge,rebase,rebase-merge,squash,manually-merged)"` + 
MergeTitleField string + MergeMessageField string + MergeCommitID string // only used for manually-merged + ForceMerge *bool `json:"force_merge,omitempty"` + DeleteBranchAfterMerge bool `json:"delete_branch_after_merge,omitempty"` } // Validate validates the fields @@ -582,6 +589,7 @@ type SubmitReviewForm struct { Content string Type string `binding:"Required;In(approve,comment,reject)"` CommitID string + Files []string } // Validate validates the fields diff --git a/services/forms/repo_tag_form.go b/services/forms/repo_tag_form.go new file mode 100644 index 000000000000..337e7fe1ea64 --- /dev/null +++ b/services/forms/repo_tag_form.go @@ -0,0 +1,27 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package forms + +import ( + "net/http" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/web/middleware" + + "gitea.com/go-chi/binding" +) + +// ProtectTagForm form for changing protected tag settings +type ProtectTagForm struct { + NamePattern string `binding:"Required;GlobOrRegexPattern"` + AllowlistUsers string + AllowlistTeams string +} + +// Validate validates the fields +func (f *ProtectTagForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { + ctx := context.GetContext(req) + return middleware.Validate(errs, ctx.Data, f, ctx.Locale) +} diff --git a/services/forms/user_form.go b/services/forms/user_form.go index 2c065dc5116a..1e12795c70bf 100644 --- a/services/forms/user_form.go +++ b/services/forms/user_form.go @@ -12,6 +12,7 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web/middleware" "gitea.com/go-chi/binding" @@ -226,10 +227,11 @@ type UpdateProfileForm struct { Name string `binding:"AlphaDashDot;MaxSize(40)"` FullName string `binding:"MaxSize(100)"` KeepEmailPrivate bool - Website string `binding:"ValidUrl;MaxSize(255)"` + Website string `binding:"ValidSiteUrl;MaxSize(255)"` Location string `binding:"MaxSize(50)"` Language string Description string `binding:"MaxSize(255)"` + Visibility structs.VisibleType KeepActivityPrivate bool } @@ -324,6 +326,8 @@ type AddKeyForm struct { Type string `binding:"OmitEmpty"` Title string `binding:"Required;MaxSize(50)"` Content string `binding:"Required"` + Signature string `binding:"OmitEmpty"` + KeyID string `binding:"OmitEmpty"` IsWritable bool } diff --git a/services/gitdiff/csv_test.go b/services/gitdiff/csv_test.go index f3dc0c2a2c46..fb84d6ed0632 100644 --- a/services/gitdiff/csv_test.go +++ b/services/gitdiff/csv_test.go @@ -110,13 +110,13 @@ func TestCSVDiff(t *testing.T) { result, err := CreateCsvDiff(diff.Files[0], baseReader, headReader) assert.NoError(t, err) - assert.Equal(t, 1, len(result), "case %d: should be one section", n) + assert.Len(t, result, 1, "case %d: should be one section", n) section := result[0] - assert.Equal(t, len(c.cells), len(section.Rows), "case %d: should be %d rows", n, len(c.cells)) + assert.Len(t, section.Rows, len(c.cells), "case %d: should be %d rows", n, len(c.cells)) for i, row := range section.Rows { - assert.Equal(t, 2, len(row.Cells), "case %d: row %d should have two cells", n, i) + assert.Len(t, row.Cells, 2, "case %d: row %d should have two cells", n, i) for j, cell := range row.Cells { assert.Equal(t, c.cells[i][j], cell.Type, "case %d: row %d cell %d should be equal", n, i, j) } diff --git a/services/gitdiff/gitdiff.go 
b/services/gitdiff/gitdiff.go index 2ca6bd957ecc..d50e41eb4027 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -32,6 +32,7 @@ import ( "github.com/sergi/go-diff/diffmatchpatch" stdcharset "golang.org/x/net/html/charset" + "golang.org/x/text/encoding" "golang.org/x/text/transform" ) @@ -574,21 +575,22 @@ func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine) tem // DiffFile represents a file diff. type DiffFile struct { - Name string - OldName string - Index int - Addition, Deletion int - Type DiffFileType - IsCreated bool - IsDeleted bool - IsBin bool - IsLFSFile bool - IsRenamed bool - IsAmbiguous bool - IsSubmodule bool - Sections []*DiffSection - IsIncomplete bool - IsProtected bool + Name string + OldName string + Index int + Addition, Deletion int + Type DiffFileType + IsCreated bool + IsDeleted bool + IsBin bool + IsLFSFile bool + IsRenamed bool + IsAmbiguous bool + IsSubmodule bool + Sections []*DiffSection + IsIncomplete bool + IsIncompleteLineTooLong bool + IsProtected bool } // GetType returns type of diff file. @@ -882,35 +884,46 @@ parsingLoop: } - // FIXME: There are numerous issues with this: + // TODO: There are numerous issues with this: // - we might want to consider detecting encoding while parsing but... // - we're likely to fail to get the correct encoding here anyway as we won't have enough information - // - and this doesn't really account for changes in encoding - var buf bytes.Buffer + var diffLineTypeBuffers = make(map[DiffLineType]*bytes.Buffer, 3) + var diffLineTypeDecoders = make(map[DiffLineType]*encoding.Decoder, 3) + diffLineTypeBuffers[DiffLinePlain] = new(bytes.Buffer) + diffLineTypeBuffers[DiffLineAdd] = new(bytes.Buffer) + diffLineTypeBuffers[DiffLineDel] = new(bytes.Buffer) for _, f := range diff.Files { - buf.Reset() + for _, buffer := range diffLineTypeBuffers { + buffer.Reset() + } for _, sec := range f.Sections { for _, l := range sec.Lines { if l.Type == DiffLineSection { continue } - buf.WriteString(l.Content[1:]) - buf.WriteString("\n") + diffLineTypeBuffers[l.Type].WriteString(l.Content[1:]) + diffLineTypeBuffers[l.Type].WriteString("\n") } } - charsetLabel, err := charset.DetectEncoding(buf.Bytes()) - if charsetLabel != "UTF-8" && err == nil { - encoding, _ := stdcharset.Lookup(charsetLabel) - if encoding != nil { - d := encoding.NewDecoder() - for _, sec := range f.Sections { - for _, l := range sec.Lines { - if l.Type == DiffLineSection { - continue - } - if c, _, err := transform.String(d, l.Content[1:]); err == nil { - l.Content = l.Content[0:1] + c - } + for lineType, buffer := range diffLineTypeBuffers { + diffLineTypeDecoders[lineType] = nil + if buffer.Len() == 0 { + continue + } + charsetLabel, err := charset.DetectEncoding(buffer.Bytes()) + if charsetLabel != "UTF-8" && err == nil { + encoding, _ := stdcharset.Lookup(charsetLabel) + if encoding != nil { + diffLineTypeDecoders[lineType] = encoding.NewDecoder() + } + } + } + for _, sec := range f.Sections { + for _, l := range sec.Lines { + decoder := diffLineTypeDecoders[l.Type] + if decoder != nil { + if c, _, err := transform.String(decoder, l.Content[1:]); err == nil { + l.Content = l.Content[0:1] + c } } } @@ -935,6 +948,7 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio for { for isFragment { curFile.IsIncomplete = true + curFile.IsIncompleteLineTooLong = true _, isFragment, err = input.ReadLine() if err != nil { // Now by the definition of ReadLine this cannot be io.EOF @@ -1062,6 +1076,7 @@ 
func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio line := string(lineBytes) if isFragment { curFile.IsIncomplete = true + curFile.IsIncompleteLineTooLong = true for isFragment { lineBytes, isFragment, err = input.ReadLine() if err != nil { @@ -1073,6 +1088,7 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio } if len(line) > maxLineCharacters { curFile.IsIncomplete = true + curFile.IsIncompleteLineTooLong = true line = line[:maxLineCharacters] } curSection.Lines[len(curSection.Lines)-1].Content = line diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go index f8c25a3912d5..2386552efec7 100644 --- a/services/gitdiff/gitdiff_test.go +++ b/services/gitdiff/gitdiff_test.go @@ -13,7 +13,6 @@ import ( "testing" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/setting" jsoniter "github.com/json-iterator/go" @@ -514,7 +513,6 @@ func TestDiffLine_GetCommentSide(t *testing.T) { } func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) { - git.Debug = true for _, behavior := range []string{"-w", "--ignore-space-at-eol", "-b", ""} { diffs, err := GetDiffRangeWithWhitespaceBehavior("./testdata/academic-module", "559c156f8e0178b71cb44355428f24001b08fc68", "bd7063cc7c04689c4d082183d32a604ed27a24f9", setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles, behavior) diff --git a/services/issue/assignee.go b/services/issue/assignee.go index f24a242f6b7d..12addede7588 100644 --- a/services/issue/assignee.go +++ b/services/issue/assignee.go @@ -25,7 +25,7 @@ func DeleteNotPassedAssignee(issue *models.Issue, doer *models.User, assignees [ } if !found { - // This function also does comments and hooks, which is why we call it seperatly instead of directly removing the assignees here + // This function also does comments and hooks, which is why we call it separately instead of directly removing the assignees here if _, _, err := ToggleAssignee(issue, doer, assignee.ID); err != nil { return err } diff --git a/services/issue/assignee_test.go b/services/issue/assignee_test.go index bdd2009bf0a8..2d96368ec705 100644 --- a/services/issue/assignee_test.go +++ b/services/issue/assignee_test.go @@ -33,5 +33,5 @@ func TestDeleteNotPassedAssignee(t *testing.T) { // Check they're gone assignees, err := models.GetAssigneesByIssue(issue) assert.NoError(t, err) - assert.Equal(t, 0, len(assignees)) + assert.Empty(t, assignees) } diff --git a/services/lfs/locks.go b/services/lfs/locks.go index 6bbe43d36bbb..20ba12e65bb2 100644 --- a/services/lfs/locks.go +++ b/services/lfs/locks.go @@ -19,30 +19,6 @@ import ( jsoniter "github.com/json-iterator/go" ) -//checkIsValidRequest check if it a valid request in case of bad request it write the response to ctx. 
-func checkIsValidRequest(ctx *context.Context) bool { - if !setting.LFS.StartServer { - log.Debug("Attempt to access LFS server but LFS server is disabled") - writeStatus(ctx, http.StatusNotFound) - return false - } - if !MetaMatcher(ctx.Req) { - log.Info("Attempt access LOCKs without accepting the correct media type: %s", lfs_module.MediaType) - writeStatus(ctx, http.StatusBadRequest) - return false - } - if !ctx.IsSigned { - user, _, _, err := parseToken(ctx.Req.Header.Get("Authorization")) - if err != nil { - ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - writeStatus(ctx, http.StatusUnauthorized) - return false - } - ctx.User = user - } - return true -} - func handleLockListOut(ctx *context.Context, repo *models.Repository, lock *models.LFSLock, err error) { if err != nil { if models.IsErrLFSLockNotExist(err) { @@ -69,23 +45,20 @@ func handleLockListOut(ctx *context.Context, repo *models.Repository, lock *mode // GetListLockHandler list locks func GetListLockHandler(ctx *context.Context) { - if !checkIsValidRequest(ctx) { - // Status is written in checkIsValidRequest - return - } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - - rv, _ := unpack(ctx) + rv := getRequestContext(ctx) repository, err := models.GetRepositoryByOwnerAndName(rv.User, rv.Repo) if err != nil { log.Debug("Could not find repository: %s/%s - %s", rv.User, rv.Repo, err) - writeStatus(ctx, 404) + ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") + ctx.JSON(401, api.LFSLockError{ + Message: "You must have pull access to list locks", + }) return } repository.MustOwner() - authenticated := authenticate(ctx, repository, rv.Authorization, false) + authenticated := authenticate(ctx, repository, rv.Authorization, true, false) if !authenticated { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ @@ -93,6 +66,7 @@ func GetListLockHandler(ctx *context.Context) { }) return } + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) cursor := ctx.QueryInt("cursor") if cursor < 0 { @@ -156,12 +130,6 @@ func GetListLockHandler(ctx *context.Context) { // PostLockHandler create lock func PostLockHandler(ctx *context.Context) { - if !checkIsValidRequest(ctx) { - // Status is written in checkIsValidRequest - return - } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - userName := ctx.Params("username") repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git") authorization := ctx.Req.Header.Get("Authorization") @@ -169,12 +137,15 @@ func PostLockHandler(ctx *context.Context) { repository, err := models.GetRepositoryByOwnerAndName(userName, repoName) if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) - writeStatus(ctx, 404) + ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") + ctx.JSON(401, api.LFSLockError{ + Message: "You must have push access to create locks", + }) return } repository.MustOwner() - authenticated := authenticate(ctx, repository, authorization, true) + authenticated := authenticate(ctx, repository, authorization, true, true) if !authenticated { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ @@ -183,6 +154,8 @@ func PostLockHandler(ctx *context.Context) { return } + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) + var req api.LFSLockRequest bodyReader := ctx.Req.Body defer bodyReader.Close() @@ -225,12 +198,6 @@ func 
PostLockHandler(ctx *context.Context) { // VerifyLockHandler list locks for verification func VerifyLockHandler(ctx *context.Context) { - if !checkIsValidRequest(ctx) { - // Status is written in checkIsValidRequest - return - } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - userName := ctx.Params("username") repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git") authorization := ctx.Req.Header.Get("Authorization") @@ -238,12 +205,15 @@ func VerifyLockHandler(ctx *context.Context) { repository, err := models.GetRepositoryByOwnerAndName(userName, repoName) if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) - writeStatus(ctx, 404) + ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") + ctx.JSON(401, api.LFSLockError{ + Message: "You must have push access to verify locks", + }) return } repository.MustOwner() - authenticated := authenticate(ctx, repository, authorization, true) + authenticated := authenticate(ctx, repository, authorization, true, true) if !authenticated { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ @@ -252,6 +222,8 @@ func VerifyLockHandler(ctx *context.Context) { return } + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) + cursor := ctx.QueryInt("cursor") if cursor < 0 { cursor = 0 @@ -292,12 +264,6 @@ func VerifyLockHandler(ctx *context.Context) { // UnLockHandler delete locks func UnLockHandler(ctx *context.Context) { - if !checkIsValidRequest(ctx) { - // Status is written in checkIsValidRequest - return - } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - userName := ctx.Params("username") repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git") authorization := ctx.Req.Header.Get("Authorization") @@ -305,12 +271,15 @@ func UnLockHandler(ctx *context.Context) { repository, err := models.GetRepositoryByOwnerAndName(userName, repoName) if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) - writeStatus(ctx, 404) + ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") + ctx.JSON(401, api.LFSLockError{ + Message: "You must have push access to delete locks", + }) return } repository.MustOwner() - authenticated := authenticate(ctx, repository, authorization, true) + authenticated := authenticate(ctx, repository, authorization, true, true) if !authenticated { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ @@ -319,6 +288,8 @@ func UnLockHandler(ctx *context.Context) { return } + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) + var req api.LFSLockDeleteRequest bodyReader := ctx.Req.Body defer bodyReader.Close() diff --git a/services/lfs/server.go b/services/lfs/server.go index cd9a3fd7a159..9954534b5e9a 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -1,4 +1,4 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. +// Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. @@ -6,6 +6,7 @@ package lfs import ( "encoding/base64" + "errors" "fmt" "io" "net/http" @@ -39,95 +40,51 @@ type Claims struct { jwt.StandardClaims } -// ObjectLink builds a URL linking to the object. 
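All of the lock handlers above now answer a missing repository or failed authentication the same way: challenge the client and return an LFS-style JSON error rather than a bare 404. A condensed sketch of that shared pattern (the respondLockUnauthorized helper name is invented here, not part of the patch):

    func respondLockUnauthorized(ctx *context.Context, msg string) {
        ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
        ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
            Message: msg, // e.g. "You must have push access to create locks"
        })
    }
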
-func (rc *requestContext) ObjectLink(oid string) string { - return setting.AppURL + path.Join(rc.User, rc.Repo+".git", "info/lfs/objects", oid) +// DownloadLink builds a URL to download the object. +func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string { + return setting.AppURL + path.Join(rc.User, rc.Repo+".git", "info/lfs/objects", p.Oid) } -// VerifyLink builds a URL for verifying the object. -func (rc *requestContext) VerifyLink() string { - return setting.AppURL + path.Join(rc.User, rc.Repo+".git", "info/lfs/verify") +// UploadLink builds a URL to upload the object. +func (rc *requestContext) UploadLink(p lfs_module.Pointer) string { + return setting.AppURL + path.Join(rc.User, rc.Repo+".git", "info/lfs/objects", p.Oid, strconv.FormatInt(p.Size, 10)) } -var oidRegExp = regexp.MustCompile(`^[A-Fa-f0-9]+$`) - -func isOidValid(oid string) bool { - return oidRegExp.MatchString(oid) +// VerifyLink builds a URL for verifying the object. +func (rc *requestContext) VerifyLink(p lfs_module.Pointer) string { + return setting.AppURL + path.Join(rc.User, rc.Repo+".git", "info/lfs/verify") } -// ObjectOidHandler is the main request routing entry point into LFS server functions -func ObjectOidHandler(ctx *context.Context) { - if !setting.LFS.StartServer { - log.Debug("Attempt to access LFS server but LFS server is disabled") - writeStatus(ctx, 404) - return - } - - if ctx.Req.Method == "GET" || ctx.Req.Method == "HEAD" { - if MetaMatcher(ctx.Req) { - getMetaHandler(ctx) - return - } +// CheckAcceptMediaType checks if the client accepts the LFS media type. +func CheckAcceptMediaType(ctx *context.Context) { + mediaParts := strings.Split(ctx.Req.Header.Get("Accept"), ";") - getContentHandler(ctx) - return - } else if ctx.Req.Method == "PUT" { - PutHandler(ctx) + if mediaParts[0] != lfs_module.MediaType { + log.Trace("Calling a LFS method without accepting the correct media type: %s", lfs_module.MediaType) + writeStatus(ctx, http.StatusUnsupportedMediaType) return } - - log.Warn("Unhandled LFS method: %s for %s/%s OID[%s]", ctx.Req.Method, ctx.Params("username"), ctx.Params("reponame"), ctx.Params("oid")) - writeStatus(ctx, 404) } -func getAuthenticatedRepoAndMeta(ctx *context.Context, rc *requestContext, p lfs_module.Pointer, requireWrite bool) (*models.LFSMetaObject, *models.Repository) { - if !isOidValid(p.Oid) { - log.Info("Attempt to access invalid LFS OID[%s] in %s/%s", p.Oid, rc.User, rc.Repo) - writeStatus(ctx, 404) - return nil, nil - } - - repository, err := models.GetRepositoryByOwnerAndName(rc.User, rc.Repo) - if err != nil { - log.Error("Unable to get repository: %s/%s Error: %v", rc.User, rc.Repo, err) - writeStatus(ctx, 404) - return nil, nil - } - - if !authenticate(ctx, repository, rc.Authorization, requireWrite) { - requireAuth(ctx) - return nil, nil - } - - meta, err := repository.GetLFSMetaObjectByOid(p.Oid) - if err != nil { - log.Error("Unable to get LFS OID[%s] Error: %v", p.Oid, err) - writeStatus(ctx, 404) - return nil, nil - } - - return meta, repository -} - -// getContentHandler gets the content from the content store -func getContentHandler(ctx *context.Context) { - rc, p := unpack(ctx) +// DownloadHandler gets the content from the content store +func DownloadHandler(ctx *context.Context) { + rc := getRequestContext(ctx) + p := lfs_module.Pointer{Oid: ctx.Params("oid")} - meta, _ := getAuthenticatedRepoAndMeta(ctx, rc, p, false) + meta := getAuthenticatedMeta(ctx, rc, p, false) if meta == nil { - // Status already written in getAuthenticatedRepoAndMeta return 
} // Support resume download using Range header var fromByte, toByte int64 toByte = meta.Size - 1 - statusCode := 200 + statusCode := http.StatusOK if rangeHdr := ctx.Req.Header.Get("Range"); rangeHdr != "" { regex := regexp.MustCompile(`bytes=(\d+)\-(\d*).*`) match := regex.FindStringSubmatch(rangeHdr) if len(match) > 1 { - statusCode = 206 + statusCode = http.StatusPartialContent fromByte, _ = strconv.ParseInt(match[1], 10, 32) if fromByte >= meta.Size { @@ -150,7 +107,6 @@ func getContentHandler(ctx *context.Context) { contentStore := lfs_module.NewContentStore() content, err := contentStore.Get(meta.Pointer) if err != nil { - // Errors are logged in contentStore.Get writeStatus(ctx, http.StatusNotFound) return } @@ -183,385 +139,305 @@ func getContentHandler(ctx *context.Context) { if written, err := io.CopyN(ctx.Resp, content, contentLength); err != nil { log.Error("Error whilst copying LFS OID[%s] to the response after %d bytes. Error: %v", meta.Oid, written, err) } - logRequest(ctx.Req, statusCode) -} - -// getMetaHandler retrieves metadata about the object -func getMetaHandler(ctx *context.Context) { - rc, p := unpack(ctx) - - meta, _ := getAuthenticatedRepoAndMeta(ctx, rc, p, false) - if meta == nil { - // Status already written in getAuthenticatedRepoAndMeta - return - } - - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - - if ctx.Req.Method == "GET" { - json := jsoniter.ConfigCompatibleWithStandardLibrary - enc := json.NewEncoder(ctx.Resp) - if err := enc.Encode(represent(rc, meta.Pointer, true, false)); err != nil { - log.Error("Failed to encode representation as json. Error: %v", err) - } - } - - logRequest(ctx.Req, 200) } -// PostHandler instructs the client how to upload data -func PostHandler(ctx *context.Context) { - if !setting.LFS.StartServer { - log.Debug("Attempt to access LFS server but LFS server is disabled") - writeStatus(ctx, 404) - return - } - - if !MetaMatcher(ctx.Req) { - log.Info("Attempt to POST without accepting the correct media type: %s", lfs_module.MediaType) - writeStatus(ctx, 400) - return - } - - rc, p := unpack(ctx) - - repository, err := models.GetRepositoryByOwnerAndName(rc.User, rc.Repo) - if err != nil { - log.Error("Unable to get repository: %s/%s Error: %v", rc.User, rc.Repo, err) - writeStatus(ctx, 404) - return - } - - if !authenticate(ctx, repository, rc.Authorization, true) { - requireAuth(ctx) +// BatchHandler provides the batch api +func BatchHandler(ctx *context.Context) { + var br lfs_module.BatchRequest + if err := decodeJSON(ctx.Req, &br); err != nil { + log.Trace("Unable to decode BATCH request vars: Error: %v", err) + writeStatus(ctx, http.StatusBadRequest) return } - if !isOidValid(p.Oid) { - log.Info("Invalid LFS OID[%s] attempt to POST in %s/%s", p.Oid, rc.User, rc.Repo) - writeStatus(ctx, 404) + var isUpload bool + if br.Operation == "upload" { + isUpload = true + } else if br.Operation == "download" { + isUpload = false + } else { + log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation) + writeStatus(ctx, http.StatusBadRequest) return } - if setting.LFS.MaxFileSize > 0 && p.Size > setting.LFS.MaxFileSize { - log.Info("Denied LFS OID[%s] upload of size %d to %s/%s because of LFS_MAX_FILE_SIZE=%d", p.Oid, p.Size, rc.User, rc.Repo, setting.LFS.MaxFileSize) - writeStatus(ctx, 413) - return - } + rc := getRequestContext(ctx) - meta, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID}) - if err != nil { - log.Error("Unable to write LFS OID[%s] size %d meta object 
in %v/%v to database. Error: %v", p.Oid, p.Size, rc.User, rc.Repo, err) - writeStatus(ctx, 404) + repository := getAuthenticatedRepository(ctx, rc, isUpload) + if repository == nil { return } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - - sentStatus := 202 contentStore := lfs_module.NewContentStore() - exist, err := contentStore.Exists(p) - if err != nil { - log.Error("Unable to check if LFS OID[%s] exist on %s / %s. Error: %v", p.Oid, rc.User, rc.Repo, err) - writeStatus(ctx, 500) - return - } - if meta.Existing && exist { - sentStatus = 200 - } - ctx.Resp.WriteHeader(sentStatus) - - json := jsoniter.ConfigCompatibleWithStandardLibrary - enc := json.NewEncoder(ctx.Resp) - if err := enc.Encode(represent(rc, meta.Pointer, meta.Existing, true)); err != nil { - log.Error("Failed to encode representation as json. Error: %v", err) - } - logRequest(ctx.Req, sentStatus) -} - -// BatchHandler provides the batch api -func BatchHandler(ctx *context.Context) { - if !setting.LFS.StartServer { - log.Debug("Attempt to access LFS server but LFS server is disabled") - writeStatus(ctx, 404) - return - } - - if !MetaMatcher(ctx.Req) { - log.Info("Attempt to BATCH without accepting the correct media type: %s", lfs_module.MediaType) - writeStatus(ctx, 400) - return - } - - bv := unpackbatch(ctx) - - reqCtx := &requestContext{ - User: ctx.Params("username"), - Repo: strings.TrimSuffix(ctx.Params("reponame"), ".git"), - Authorization: ctx.Req.Header.Get("Authorization"), - } var responseObjects []*lfs_module.ObjectResponse - // Create a response object - for _, object := range bv.Objects { - if !isOidValid(object.Oid) { - log.Info("Invalid LFS OID[%s] attempt to BATCH in %s/%s", object.Oid, reqCtx.User, reqCtx.Repo) + for _, p := range br.Objects { + if !p.IsValid() { + responseObjects = append(responseObjects, buildObjectResponse(rc, p, false, false, &lfs_module.ObjectError{ + Code: http.StatusUnprocessableEntity, + Message: "Oid or size are invalid", + })) continue } - repository, err := models.GetRepositoryByOwnerAndName(reqCtx.User, reqCtx.Repo) + exists, err := contentStore.Exists(p) if err != nil { - log.Error("Unable to get repository: %s/%s Error: %v", reqCtx.User, reqCtx.Repo, err) - writeStatus(ctx, 404) + log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, rc.User, rc.Repo, err) + writeStatus(ctx, http.StatusInternalServerError) return } - requireWrite := false - if bv.Operation == "upload" { - requireWrite = true - } - - if !authenticate(ctx, repository, reqCtx.Authorization, requireWrite) { - requireAuth(ctx) + meta, err := repository.GetLFSMetaObjectByOid(p.Oid) + if err != nil && err != models.ErrLFSObjectNotExist { + log.Error("Unable to get LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err) + writeStatus(ctx, http.StatusInternalServerError) return } - contentStore := lfs_module.NewContentStore() - - meta, err := repository.GetLFSMetaObjectByOid(object.Oid) - if err == nil { // Object is found and exists - exist, err := contentStore.Exists(meta.Pointer) - if err != nil { - log.Error("Unable to check if LFS OID[%s] exist on %s / %s. 
Error: %v", object.Oid, reqCtx.User, reqCtx.Repo, err) - writeStatus(ctx, 500) - return - } - if exist { - responseObjects = append(responseObjects, represent(reqCtx, meta.Pointer, true, false)) - continue - } + if meta != nil && p.Size != meta.Size { + responseObjects = append(responseObjects, buildObjectResponse(rc, p, false, false, &lfs_module.ObjectError{ + Code: http.StatusUnprocessableEntity, + Message: fmt.Sprintf("Object %s is not %d bytes", p.Oid, p.Size), + })) + continue } - if requireWrite && setting.LFS.MaxFileSize > 0 && object.Size > setting.LFS.MaxFileSize { - log.Info("Denied LFS OID[%s] upload of size %d to %s/%s because of LFS_MAX_FILE_SIZE=%d", object.Oid, object.Size, reqCtx.User, reqCtx.Repo, setting.LFS.MaxFileSize) - writeStatus(ctx, 413) - return - } + var responseObject *lfs_module.ObjectResponse + if isUpload { + var err *lfs_module.ObjectError + if !exists && setting.LFS.MaxFileSize > 0 && p.Size > setting.LFS.MaxFileSize { + err = &lfs_module.ObjectError{ + Code: http.StatusUnprocessableEntity, + Message: fmt.Sprintf("Size must be less than or equal to %d", setting.LFS.MaxFileSize), + } + } - // Object is not found - meta, err = models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: object, RepositoryID: repository.ID}) - if err == nil { - exist, err := contentStore.Exists(meta.Pointer) - if err != nil { - log.Error("Unable to check if LFS OID[%s] exist on %s / %s. Error: %v", object.Oid, reqCtx.User, reqCtx.Repo, err) - writeStatus(ctx, 500) - return + if exists { + if meta == nil { + _, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID}) + if err != nil { + log.Error("Unable to create LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err) + writeStatus(ctx, http.StatusInternalServerError) + return + } + } } - responseObjects = append(responseObjects, represent(reqCtx, meta.Pointer, meta.Existing, !exist)) + + responseObject = buildObjectResponse(rc, p, false, !exists, err) } else { - log.Error("Unable to write LFS OID[%s] size %d meta object in %v/%v to database. Error: %v", object.Oid, object.Size, reqCtx.User, reqCtx.Repo, err) + var err *lfs_module.ObjectError + if !exists || meta == nil { + err = &lfs_module.ObjectError{ + Code: http.StatusNotFound, + Message: http.StatusText(http.StatusNotFound), + } + } + + responseObject = buildObjectResponse(rc, p, true, false, err) } + responseObjects = append(responseObjects, responseObject) } - ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - respobj := &lfs_module.BatchResponse{Objects: responseObjects} - json := jsoniter.ConfigCompatibleWithStandardLibrary - enc := json.NewEncoder(ctx.Resp) + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) + + enc := jsoniter.NewEncoder(ctx.Resp) if err := enc.Encode(respobj); err != nil { log.Error("Failed to encode representation as json. 
Error: %v", err) } - logRequest(ctx.Req, 200) } -// PutHandler receives data from the client and puts it into the content store -func PutHandler(ctx *context.Context) { - rc, p := unpack(ctx) +// UploadHandler receives data from the client and puts it into the content store +func UploadHandler(ctx *context.Context) { + rc := getRequestContext(ctx) - meta, repository := getAuthenticatedRepoAndMeta(ctx, rc, p, true) - if meta == nil { - // Status already written in getAuthenticatedRepoAndMeta + p := lfs_module.Pointer{Oid: ctx.Params("oid")} + var err error + if p.Size, err = strconv.ParseInt(ctx.Params("size"), 10, 64); err != nil { + writeStatusMessage(ctx, http.StatusUnprocessableEntity, err.Error()) + } + + if !p.IsValid() { + log.Trace("Attempt to access invalid LFS OID[%s] in %s/%s", p.Oid, rc.User, rc.Repo) + writeStatus(ctx, http.StatusUnprocessableEntity) + return + } + + repository := getAuthenticatedRepository(ctx, rc, true) + if repository == nil { + return + } + + meta, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID}) + if err != nil { + log.Error("Unable to create LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err) + writeStatus(ctx, http.StatusInternalServerError) return } contentStore := lfs_module.NewContentStore() + + exists, err := contentStore.Exists(p) + if err != nil { + log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, err) + writeStatus(ctx, http.StatusInternalServerError) + return + } + if meta.Existing || exists { + ctx.Resp.WriteHeader(http.StatusOK) + return + } + defer ctx.Req.Body.Close() if err := contentStore.Put(meta.Pointer, ctx.Req.Body); err != nil { - // Put will log the error itself - ctx.Resp.WriteHeader(500) - if err == lfs_module.ErrSizeMismatch || err == lfs_module.ErrHashMismatch { - fmt.Fprintf(ctx.Resp, `{"message":"%s"}`, err) + if errors.Is(err, lfs_module.ErrSizeMismatch) || errors.Is(err, lfs_module.ErrHashMismatch) { + writeStatusMessage(ctx, http.StatusUnprocessableEntity, err.Error()) } else { - fmt.Fprintf(ctx.Resp, `{"message":"Internal Server Error"}`) + writeStatus(ctx, http.StatusInternalServerError) } if _, err = repository.RemoveLFSMetaObjectByOid(p.Oid); err != nil { - log.Error("Whilst removing metaobject for LFS OID[%s] due to preceding error there was another Error: %v", p.Oid, err) + log.Error("Error whilst removing metaobject for LFS OID[%s]: %v", p.Oid, err) } return } - logRequest(ctx.Req, 200) + writeStatus(ctx, http.StatusOK) } // VerifyHandler verify oid and its size from the content store func VerifyHandler(ctx *context.Context) { - if !setting.LFS.StartServer { - log.Debug("Attempt to access LFS server but LFS server is disabled") - writeStatus(ctx, 404) + var p lfs_module.Pointer + if err := decodeJSON(ctx.Req, &p); err != nil { + writeStatus(ctx, http.StatusUnprocessableEntity) return } - if !MetaMatcher(ctx.Req) { - log.Info("Attempt to VERIFY without accepting the correct media type: %s", lfs_module.MediaType) - writeStatus(ctx, 400) - return - } - - rc, p := unpack(ctx) + rc := getRequestContext(ctx) - meta, _ := getAuthenticatedRepoAndMeta(ctx, rc, p, true) + meta := getAuthenticatedMeta(ctx, rc, p, true) if meta == nil { - // Status already written in getAuthenticatedRepoAndMeta return } contentStore := lfs_module.NewContentStore() ok, err := contentStore.Verify(meta.Pointer) + + status := http.StatusOK if err != nil { - // Error will be logged in Verify - ctx.Resp.WriteHeader(500) - fmt.Fprintf(ctx.Resp, `{"message":"Internal 
Server Error"}`) - return - } - if !ok { - writeStatus(ctx, 422) - return + status = http.StatusInternalServerError + } else if !ok { + status = http.StatusNotFound } + writeStatus(ctx, status) +} - logRequest(ctx.Req, 200) +func decodeJSON(req *http.Request, v interface{}) error { + defer req.Body.Close() + + dec := jsoniter.NewDecoder(req.Body) + return dec.Decode(v) } -// represent takes a requestContext and Meta and turns it into a ObjectResponse suitable -// for json encoding -func represent(rc *requestContext, pointer lfs_module.Pointer, download, upload bool) *lfs_module.ObjectResponse { - rep := &lfs_module.ObjectResponse{ - Pointer: pointer, - Actions: make(map[string]*lfs_module.Link), +func getRequestContext(ctx *context.Context) *requestContext { + return &requestContext{ + User: ctx.Params("username"), + Repo: strings.TrimSuffix(ctx.Params("reponame"), ".git"), + Authorization: ctx.Req.Header.Get("Authorization"), } +} - header := make(map[string]string) - - if rc.Authorization == "" { - //https://github.com/github/git-lfs/issues/1088 - header["Authorization"] = "Authorization: Basic dummy" - } else { - header["Authorization"] = rc.Authorization +func getAuthenticatedMeta(ctx *context.Context, rc *requestContext, p lfs_module.Pointer, requireWrite bool) *models.LFSMetaObject { + if !p.IsValid() { + log.Info("Attempt to access invalid LFS OID[%s] in %s/%s", p.Oid, rc.User, rc.Repo) + writeStatusMessage(ctx, http.StatusUnprocessableEntity, "Oid or size are invalid") + return nil } - if download { - rep.Actions["download"] = &lfs_module.Link{Href: rc.ObjectLink(pointer.Oid), Header: header} + repository := getAuthenticatedRepository(ctx, rc, requireWrite) + if repository == nil { + return nil } - if upload { - rep.Actions["upload"] = &lfs_module.Link{Href: rc.ObjectLink(pointer.Oid), Header: header} + meta, err := repository.GetLFSMetaObjectByOid(p.Oid) + if err != nil { + log.Error("Unable to get LFS OID[%s] Error: %v", p.Oid, err) + writeStatus(ctx, http.StatusNotFound) + return nil } - if upload && !download { - // Force client side verify action while gitea lacks proper server side verification - verifyHeader := make(map[string]string) - for k, v := range header { - verifyHeader[k] = v - } + return meta +} - // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 - verifyHeader["Accept"] = lfs_module.MediaType +func getAuthenticatedRepository(ctx *context.Context, rc *requestContext, requireWrite bool) *models.Repository { + repository, err := models.GetRepositoryByOwnerAndName(rc.User, rc.Repo) + if err != nil { + log.Error("Unable to get repository: %s/%s Error: %v", rc.User, rc.Repo, err) + writeStatus(ctx, http.StatusNotFound) + return nil + } - rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(), Header: verifyHeader} + if !authenticate(ctx, repository, rc.Authorization, false, requireWrite) { + requireAuth(ctx) + return nil } - return rep + return repository } -// MetaMatcher provides a mux.MatcherFunc that only allows requests that contain -// an Accept header with the lfs_module.MediaType -func MetaMatcher(r *http.Request) bool { - mediaParts := strings.Split(r.Header.Get("Accept"), ";") - mt := mediaParts[0] - return mt == lfs_module.MediaType -} +func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, download, upload bool, err *lfs_module.ObjectError) *lfs_module.ObjectResponse { + rep := &lfs_module.ObjectResponse{Pointer: pointer} + if err != nil { + rep.Error = err + } else { + rep.Actions = 
make(map[string]*lfs_module.Link) -func unpack(ctx *context.Context) (*requestContext, lfs_module.Pointer) { - r := ctx.Req - rc := &requestContext{ - User: ctx.Params("username"), - Repo: strings.TrimSuffix(ctx.Params("reponame"), ".git"), - Authorization: r.Header.Get("Authorization"), - } - p := lfs_module.Pointer{Oid: ctx.Params("oid")} + header := make(map[string]string) - if r.Method == "POST" { // Maybe also check if +json - var p2 lfs_module.Pointer - bodyReader := r.Body - defer bodyReader.Close() - json := jsoniter.ConfigCompatibleWithStandardLibrary - dec := json.NewDecoder(bodyReader) - err := dec.Decode(&p2) - if err != nil { - // The error is logged as a WARN here because this may represent misbehaviour rather than a true error - log.Warn("Unable to decode POST request vars for LFS OID[%s] in %s/%s: Error: %v", p.Oid, rc.User, rc.Repo, err) - return rc, p + if len(rc.Authorization) > 0 { + header["Authorization"] = rc.Authorization } - p.Oid = p2.Oid - p.Size = p2.Size - } - - return rc, p -} + if download { + rep.Actions["download"] = &lfs_module.Link{Href: rc.DownloadLink(pointer), Header: header} + } + if upload { + rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header} -// TODO cheap hack, unify with unpack -func unpackbatch(ctx *context.Context) *lfs_module.BatchRequest { + verifyHeader := make(map[string]string) + for key, value := range header { + verifyHeader[key] = value + } - r := ctx.Req - var bv lfs_module.BatchRequest + // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 + verifyHeader["Accept"] = lfs_module.MediaType - bodyReader := r.Body - defer bodyReader.Close() - json := jsoniter.ConfigCompatibleWithStandardLibrary - dec := json.NewDecoder(bodyReader) - err := dec.Decode(&bv) - if err != nil { - // The error is logged as a WARN here because this may represent misbehaviour rather than a true error - log.Warn("Unable to decode BATCH request vars in %s/%s: Error: %v", ctx.Params("username"), strings.TrimSuffix(ctx.Params("reponame"), ".git"), err) - return &bv + rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(pointer), Header: verifyHeader} + } } - - return &bv + return rep } func writeStatus(ctx *context.Context, status int) { - message := http.StatusText(status) - - mediaParts := strings.Split(ctx.Req.Header.Get("Accept"), ";") - mt := mediaParts[0] - if strings.HasSuffix(mt, "+json") { - message = `{"message":"` + message + `"}` - } + writeStatusMessage(ctx, status, http.StatusText(status)) +} +func writeStatusMessage(ctx *context.Context, status int, message string) { + ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) ctx.Resp.WriteHeader(status) - fmt.Fprint(ctx.Resp, message) - logRequest(ctx.Req, status) -} -func logRequest(r *http.Request, status int) { - log.Debug("LFS request - Method: %s, URL: %s, Status %d", r.Method, r.URL, status) + er := lfs_module.ErrorResponse{Message: message} + + enc := jsoniter.NewEncoder(ctx.Resp) + if err := enc.Encode(er); err != nil { + log.Error("Failed to encode error response as json. Error: %v", err) + } } // authenticate uses the authorization string to determine whether // or not to proceed. This server assumes an HTTP Basic auth format. 
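The upload-only branch above advertises a verify action so the client can confirm the object after the transfer. A hypothetical client-side counterpart of VerifyHandler would POST the pointer as JSON to that verify href with the LFS media type and treat 200 as success; the URL and token below are placeholders, not values from this change.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// verifyObject sketches the round trip VerifyHandler serves: the pointer is
// posted as JSON and the status code alone answers "is this object stored?".
func verifyObject(verifyURL, authorization, oid string, size int64) (bool, error) {
	body, err := json.Marshal(map[string]interface{}{"oid": oid, "size": size})
	if err != nil {
		return false, err
	}
	req, err := http.NewRequest(http.MethodPost, verifyURL, bytes.NewReader(body))
	if err != nil {
		return false, err
	}
	req.Header.Set("Content-Type", "application/vnd.git-lfs+json")
	req.Header.Set("Accept", "application/vnd.git-lfs+json")
	if authorization != "" {
		req.Header.Set("Authorization", authorization)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	return resp.StatusCode == http.StatusOK, nil
}

func main() {
	ok, err := verifyObject("<verify-url>", "", "<oid>", 0)
	fmt.Println(ok, err)
}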
-func authenticate(ctx *context.Context, repository *models.Repository, authorization string, requireWrite bool) bool { +func authenticate(ctx *context.Context, repository *models.Repository, authorization string, requireSigned, requireWrite bool) bool { accessMode := models.AccessModeRead if requireWrite { accessMode = models.AccessModeWrite @@ -575,92 +451,75 @@ func authenticate(ctx *context.Context, repository *models.Repository, authoriza } canRead := perm.CanAccess(accessMode, models.UnitTypeCode) - if canRead { + if canRead && (!requireSigned || ctx.IsSigned) { return true } - user, repo, opStr, err := parseToken(authorization) + user, err := parseToken(authorization, repository, accessMode) if err != nil { // Most of these are Warn level - the true internal server errors are logged in parseToken already log.Warn("Authentication failure for provided token with Error: %v", err) return false } ctx.User = user - if opStr == "basic" { - perm, err = models.GetUserRepoPermission(repository, ctx.User) - if err != nil { - log.Error("Unable to GetUserRepoPermission for user %-v in repo %-v Error: %v", ctx.User, repository) - return false - } - return perm.CanAccess(accessMode, models.UnitTypeCode) + return true +} + +func handleLFSToken(tokenSHA string, target *models.Repository, mode models.AccessMode) (*models.User, error) { + if !strings.Contains(tokenSHA, ".") { + return nil, nil } - if repository.ID == repo.ID { - if requireWrite && opStr != "upload" { - return false + token, err := jwt.ParseWithClaims(tokenSHA, &Claims{}, func(t *jwt.Token) (interface{}, error) { + if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"]) } - return true + return setting.LFS.JWTSecretBytes, nil + }) + if err != nil { + return nil, nil } - return false -} -func parseToken(authorization string) (*models.User, *models.Repository, string, error) { - if authorization == "" { - return nil, nil, "unknown", fmt.Errorf("No token") + claims, claimsOk := token.Claims.(*Claims) + if !token.Valid || !claimsOk { + return nil, fmt.Errorf("invalid token claim") } - if strings.HasPrefix(authorization, "Bearer ") { - token, err := jwt.ParseWithClaims(authorization[7:], &Claims{}, func(t *jwt.Token) (interface{}, error) { - if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok { - return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"]) - } - return setting.LFS.JWTSecretBytes, nil - }) - if err != nil { - // The error here is WARN level because it is caused by bad authorization rather than an internal server error - return nil, nil, "unknown", err - } - claims, claimsOk := token.Claims.(*Claims) - if !token.Valid || !claimsOk { - return nil, nil, "unknown", fmt.Errorf("Token claim invalid") - } - r, err := models.GetRepositoryByID(claims.RepoID) - if err != nil { - log.Error("Unable to GetRepositoryById[%d]: Error: %v", claims.RepoID, err) - return nil, nil, claims.Op, err - } - u, err := models.GetUserByID(claims.UserID) - if err != nil { - log.Error("Unable to GetUserById[%d]: Error: %v", claims.UserID, err) - return nil, r, claims.Op, err - } - return u, r, claims.Op, nil + + if claims.RepoID != target.ID { + return nil, fmt.Errorf("invalid token claim") } - if strings.HasPrefix(authorization, "Basic ") { - c, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(authorization, "Basic ")) - if err != nil { - return nil, nil, "basic", err - } - cs := string(c) - i := strings.IndexByte(cs, ':') - if i < 0 { - return nil, nil, "basic", 
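handleLFSToken above only consumes tokens; the matching producer elsewhere in Gitea signs an HMAC JWT whose claims carry the repository, user and operation the LFS route may perform. A minimal sketch of minting such a token, assuming a jwt-go style API and a simplified stand-in for the package's Claims type (the import path and json tags here are assumptions, not taken from this change).

package main

import (
	"fmt"
	"time"

	"github.com/golang-jwt/jwt" // stand-in import; the handler above uses the jwt package imported by its own file
)

// lfsClaims is a simplified stand-in for the Claims type checked above: the
// token is bound to one repository, one user and one operation ("upload" or
// "download").
type lfsClaims struct {
	RepoID int64  `json:"repo"`
	Op     string `json:"op"`
	UserID int64  `json:"uid"`
	jwt.StandardClaims
}

// mintLFSToken signs a short-lived HMAC token of the shape handleLFSToken parses.
func mintLFSToken(secret []byte, repoID, userID int64, op string) (string, error) {
	claims := lfsClaims{
		RepoID: repoID,
		Op:     op,
		UserID: userID,
		StandardClaims: jwt.StandardClaims{
			ExpiresAt: time.Now().Add(5 * time.Minute).Unix(),
		},
	}
	return jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(secret)
}

func main() {
	token, err := mintLFSToken([]byte("dummy-secret"), 1, 2, "upload")
	fmt.Println(token, err)
}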
fmt.Errorf("Basic auth invalid") - } - user, password := cs[:i], cs[i+1:] - u, err := models.GetUserByName(user) - if err != nil { - log.Error("Unable to GetUserByName[%d]: Error: %v", user, err) - return nil, nil, "basic", err - } - if !u.IsPasswordSet() || !u.ValidatePassword(password) { - return nil, nil, "basic", fmt.Errorf("Basic auth failed") - } - return u, nil, "basic", nil + if mode == models.AccessModeWrite && claims.Op != "upload" { + return nil, fmt.Errorf("invalid token claim") + } + + u, err := models.GetUserByID(claims.UserID) + if err != nil { + log.Error("Unable to GetUserById[%d]: Error: %v", claims.UserID, err) + return nil, err } + return u, nil +} - return nil, nil, "unknown", fmt.Errorf("Token not found") +func parseToken(authorization string, target *models.Repository, mode models.AccessMode) (*models.User, error) { + if authorization == "" { + return nil, fmt.Errorf("no token") + } + + parts := strings.SplitN(authorization, " ", 2) + if len(parts) != 2 { + return nil, fmt.Errorf("no token") + } + tokenSHA := parts[1] + switch strings.ToLower(parts[0]) { + case "bearer": + fallthrough + case "token": + return handleLFSToken(tokenSHA, target, mode) + } + return nil, fmt.Errorf("token not found") } func requireAuth(ctx *context.Context) { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - writeStatus(ctx, 401) + writeStatus(ctx, http.StatusUnauthorized) } diff --git a/services/mailer/mail.go b/services/mailer/mail.go index f22140c9f762..14512d7d6560 100644 --- a/services/mailer/mail.go +++ b/services/mailer/mail.go @@ -11,6 +11,7 @@ import ( "html/template" "mime" "regexp" + "strconv" "strings" texttmpl "text/template" @@ -21,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/translation" @@ -66,13 +68,15 @@ func sendUserMail(language string, u *models.User, tpl base.TplName, code, subje "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, language), "ResetPwdCodeLives": timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, language), "Code": code, - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var content bytes.Buffer - // TODO: i18n templates? if err := bodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil { log.Error("Template: %v", err) return @@ -103,13 +107,15 @@ func SendActivateEmailMail(u *models.User, email *models.EmailAddress) { "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale.Language()), "Code": u.GenerateEmailActivateCode(email.Email), "Email": email.Email, - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var content bytes.Buffer - // TODO: i18n templates? if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil { log.Error("Template: %v", err) return @@ -128,13 +134,15 @@ func SendRegisterNotifyMail(u *models.User) { data := map[string]interface{}{ "DisplayName": u.DisplayName(), "Username": u.Name, - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var content bytes.Buffer - // TODO: i18n templates? 
if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil { log.Error("Template: %v", err) return @@ -156,13 +164,15 @@ func SendCollaboratorMail(u, doer *models.User, repo *models.Repository) { "Subject": subject, "RepoName": repoName, "Link": repo.HTMLURL(), - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var content bytes.Buffer - // TODO: i18n templates? if err := bodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil { log.Error("Template: %v", err) return @@ -174,7 +184,7 @@ func SendCollaboratorMail(u, doer *models.User, repo *models.Repository) { SendAsync(msg) } -func composeIssueCommentMessages(ctx *mailCommentContext, lang string, tos []string, fromMention bool, info string) ([]*Message, error) { +func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipients []*models.User, fromMention bool, info string) ([]*Message, error) { var ( subject string link string @@ -238,12 +248,14 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, tos []str "ActionType": actType, "ActionName": actName, "ReviewComments": reviewComments, - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var mailSubject bytes.Buffer - // TODO: i18n templates? if err := subjectTemplates.ExecuteTemplate(&mailSubject, string(tplName), mailMeta); err == nil { subject = sanitizeSubject(mailSubject.String()) if subject == "" { @@ -259,15 +271,14 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, tos []str var mailBody bytes.Buffer - // TODO: i18n templates? if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplName), mailMeta); err != nil { log.Error("ExecuteTemplate [%s]: %v", string(tplName)+"/body", err) } // Make sure to compose independent messages to avoid leaking user emails - msgs := make([]*Message, 0, len(tos)) - for _, to := range tos { - msg := NewMessageFrom([]string{to}, ctx.Doer.DisplayName(), setting.MailService.FromEmail, subject, mailBody.String()) + msgs := make([]*Message, 0, len(recipients)) + for _, recipient := range recipients { + msg := NewMessageFrom([]string{recipient.Email}, ctx.Doer.DisplayName(), setting.MailService.FromEmail, subject, mailBody.String()) msg.Info = fmt.Sprintf("Subject: %s, %s", subject, info) // Set Message-ID on first message so replies know what to reference @@ -277,12 +288,51 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, tos []str msg.SetHeader("In-Reply-To", "<"+ctx.Issue.ReplyReference()+">") msg.SetHeader("References", "<"+ctx.Issue.ReplyReference()+">") } + + for key, value := range generateAdditionalHeaders(ctx, actType, recipient) { + msg.SetHeader(key, value) + } + msgs = append(msgs, msg) } return msgs, nil } +func generateAdditionalHeaders(ctx *mailCommentContext, reason string, recipient *models.User) map[string]string { + repo := ctx.Issue.Repo + + return map[string]string{ + // https://datatracker.ietf.org/doc/html/rfc2919 + "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain), + + // https://datatracker.ietf.org/doc/html/rfc2369 + "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()), + //"List-Post": https://github.com/go-gitea/gitea/pull/13585 + //"List-Unsubscribe": https://github.com/go-gitea/gitea/issues/10808, https://github.com/go-gitea/gitea/issues/13283 + + 
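The change from plain address strings to *models.User recipients is what makes the per-recipient headers further down possible while keeping the existing per-language batching. A small illustrative sketch of that grouping pattern, with a simplified user type standing in for models.User.

package main

import "fmt"

type user struct {
	Name     string
	Email    string
	Language string
}

// groupByLanguage mirrors the langMap pattern used by SendIssueAssignedMail
// and mailIssueCommentBatch: render the mail once per language and send that
// rendering to every recipient who prefers it.
func groupByLanguage(recipients []*user) map[string][]*user {
	langMap := make(map[string][]*user)
	for _, u := range recipients {
		langMap[u.Language] = append(langMap[u.Language], u)
	}
	return langMap
}

func main() {
	recipients := []*user{
		{Name: "a", Email: "a@example.com", Language: "en-US"},
		{Name: "b", Email: "b@example.com", Language: "de-DE"},
		{Name: "c", Email: "c@example.com", Language: "en-US"},
	}
	for lang, tos := range groupByLanguage(recipients) {
		fmt.Printf("render once for %s, send to %d recipient(s)\n", lang, len(tos))
	}
}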
"X-Gitea-Reason": reason, + "X-Gitea-Sender": ctx.Doer.DisplayName(), + "X-Gitea-Recipient": recipient.DisplayName(), + "X-Gitea-Recipient-Address": recipient.Email, + "X-Gitea-Repository": repo.Name, + "X-Gitea-Repository-Path": repo.FullName(), + "X-Gitea-Repository-Link": repo.HTMLURL(), + "X-Gitea-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10), + "X-Gitea-Issue-Link": ctx.Issue.HTMLURL(), + + "X-GitHub-Reason": reason, + "X-GitHub-Sender": ctx.Doer.DisplayName(), + "X-GitHub-Recipient": recipient.DisplayName(), + "X-GitHub-Recipient-Address": recipient.Email, + + "X-GitLab-NotificationReason": reason, + "X-GitLab-Project": repo.Name, + "X-GitLab-Project-Path": repo.FullName(), + "X-GitLab-Issue-IID": strconv.FormatInt(ctx.Issue.Index, 10), + } +} + func sanitizeSubject(subject string) string { runes := []rune(strings.TrimSpace(subjectRemoveSpaces.ReplaceAllLiteralString(subject, " "))) if len(runes) > mailMaxSubjectRunes { @@ -294,9 +344,9 @@ func sanitizeSubject(subject string) string { // SendIssueAssignedMail composes and sends issue assigned email func SendIssueAssignedMail(issue *models.Issue, doer *models.User, content string, comment *models.Comment, recipients []*models.User) error { - langMap := make(map[string][]string) + langMap := make(map[string][]*models.User) for _, user := range recipients { - langMap[user.Language] = append(langMap[user.Language], user.Email) + langMap[user.Language] = append(langMap[user.Language], user) } for lang, tos := range langMap { @@ -337,6 +387,8 @@ func actionToTemplate(issue *models.Issue, actionType models.ActionType, name = "merge" case models.ActionPullReviewDismissed: name = "review_dismissed" + case models.ActionPullRequestReadyForReview: + name = "ready_for_review" default: switch commentType { case models.CommentTypeReview: diff --git a/services/mailer/mail_comment.go b/services/mailer/mail_comment.go index f73c9fb6372f..eca05cef2960 100644 --- a/services/mailer/mail_comment.go +++ b/services/mailer/mail_comment.go @@ -11,12 +11,16 @@ import ( // MailParticipantsComment sends new comment emails to repository watchers and mentioned people. func MailParticipantsComment(c *models.Comment, opType models.ActionType, issue *models.Issue, mentions []*models.User) error { + content := c.Content + if c.Type == models.CommentTypePullPush { + content = "" + } if err := mailIssueCommentToParticipants( &mailCommentContext{ Issue: issue, Doer: c.Poster, ActionType: opType, - Content: c.Content, + Content: content, Comment: c, }, mentions); err != nil { log.Error("mailIssueCommentToParticipants: %v", err) diff --git a/services/mailer/mail_issue.go b/services/mailer/mail_issue.go index bb541d27a091..6ffc08c8c089 100644 --- a/services/mailer/mail_issue.go +++ b/services/mailer/mail_issue.go @@ -30,7 +30,7 @@ const ( // mailIssueCommentToParticipants can be used for both new issue creation and comment. // This function sends two list of emails: -// 1. Repository watchers and users who are participated in comments. +// 1. Repository watchers (except for WIP pull requests) and users who are participated in comments. // 2. Users who are not in 1. but get mentioned in current issue/comment. func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*models.User) error { @@ -74,11 +74,13 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*models. 
// =========== Repo watchers =========== // Make repo watchers last, since it's likely the list with the most users - ids, err = models.GetRepoWatchersIDs(ctx.Issue.RepoID) - if err != nil { - return fmt.Errorf("GetRepoWatchersIDs(%d): %v", ctx.Issue.RepoID, err) + if !(ctx.Issue.IsPull && ctx.Issue.PullRequest.IsWorkInProgress() && ctx.ActionType != models.ActionCreatePullRequest) { + ids, err = models.GetRepoWatchersIDs(ctx.Issue.RepoID) + if err != nil { + return fmt.Errorf("GetRepoWatchersIDs(%d): %v", ctx.Issue.RepoID, err) + } + unfiltered = append(ids, unfiltered...) } - unfiltered = append(ids, unfiltered...) visited := make(map[int64]bool, len(unfiltered)+len(mentions)+1) @@ -116,7 +118,7 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*models.User, visite checkUnit = models.UnitTypePullRequests } - langMap := make(map[string][]string) + langMap := make(map[string][]*models.User) for _, user := range users { // At this point we exclude: // user that don't have all mails enabled or users only get mail on mention and this is one ... @@ -138,7 +140,7 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*models.User, visite continue } - langMap[user.Language] = append(langMap[user.Language], user.Email) + langMap[user.Language] = append(langMap[user.Language], user) } for lang, receivers := range langMap { @@ -161,12 +163,18 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*models.User, visite // MailParticipants sends new issue thread created emails to repository watchers // and mentioned people. func MailParticipants(issue *models.Issue, doer *models.User, opType models.ActionType, mentions []*models.User) error { + content := issue.Content + if opType == models.ActionCloseIssue || opType == models.ActionClosePullRequest || + opType == models.ActionReopenIssue || opType == models.ActionReopenPullRequest || + opType == models.ActionMergePullRequest { + content = "" + } if err := mailIssueCommentToParticipants( &mailCommentContext{ Issue: issue, Doer: doer, ActionType: opType, - Content: issue.Content, + Content: content, Comment: nil, }, mentions); err != nil { log.Error("mailIssueCommentToParticipants: %v", err) diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go index 1e12fe13acde..f92d3a78fa54 100644 --- a/services/mailer/mail_release.go +++ b/services/mailer/mail_release.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" ) @@ -63,13 +64,15 @@ func mailNewRelease(lang string, tos []string, rel *models.Release) { mailMeta := map[string]interface{}{ "Release": rel, "Subject": subject, - "i18n": locale, "Language": locale.Language(), + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } var mailBody bytes.Buffer - // TODO: i18n templates? 
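The guard added above reads more easily when restated as a standalone predicate: watchers are mailed unless the issue is a work-in-progress pull request, with the single exception of the mail announcing that the pull request was opened. An illustrative restatement with simplified types follows.

package main

import "fmt"

type actionType int

const (
	actionCreatePullRequest actionType = iota
	actionCommentPull
)

// notifyWatchers restates the condition guarding GetRepoWatchersIDs above.
func notifyWatchers(isPull, isWIP bool, action actionType) bool {
	return !(isPull && isWIP && action != actionCreatePullRequest)
}

func main() {
	fmt.Println(notifyWatchers(true, true, actionCommentPull))       // false: comments on a WIP pull request stay quiet
	fmt.Println(notifyWatchers(true, true, actionCreatePullRequest)) // true: opening the pull request is still announced
	fmt.Println(notifyWatchers(false, false, actionCommentPull))     // true: plain issues are unaffected
}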
if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplNewReleaseMail), mailMeta); err != nil { log.Error("ExecuteTemplate [%s]: %v", string(tplNewReleaseMail)+"/body", err) return diff --git a/services/mailer/mail_repo.go b/services/mailer/mail_repo.go index c742101ee196..4e629ee5c76b 100644 --- a/services/mailer/mail_repo.go +++ b/services/mailer/mail_repo.go @@ -9,6 +9,7 @@ import ( "fmt" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" ) @@ -57,12 +58,14 @@ func sendRepoTransferNotifyMailPerLang(lang string, newOwner, doer *models.User, "Repo": repo.FullName(), "Link": repo.HTMLURL(), "Subject": subject, - "i18n": locale, "Language": locale.Language(), "Destination": destination, + // helper + "i18n": locale, + "Str2html": templates.Str2html, + "TrN": templates.TrN, } - // TODO: i18n templates? if err := bodyTemplates.ExecuteTemplate(&content, string(mailRepoTransferNotify), data); err != nil { return err } diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go index 813e51c0d215..0a9112f3be59 100644 --- a/services/mailer/mail_test.go +++ b/services/mailer/mail_test.go @@ -39,7 +39,7 @@ const bodyTpl = ` ` -func TestComposeIssueCommentMessage(t *testing.T) { +func prepareMailerTest(t *testing.T) (doer *models.User, repo *models.Repository, issue *models.Issue, comment *models.Comment) { assert.NoError(t, models.PrepareTestDatabase()) var mailService = setting.Mailer{ From: "test@gitea.com", @@ -48,18 +48,24 @@ func TestComposeIssueCommentMessage(t *testing.T) { setting.MailService = &mailService setting.Domain = "localhost" - doer := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) - repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1, Owner: doer}).(*models.Repository) - issue := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) - comment := models.AssertExistsAndLoadBean(t, &models.Comment{ID: 2, Issue: issue}).(*models.Comment) + doer = models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) + repo = models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1, Owner: doer}).(*models.Repository) + issue = models.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) + assert.NoError(t, issue.LoadRepo()) + comment = models.AssertExistsAndLoadBean(t, &models.Comment{ID: 2, Issue: issue}).(*models.Comment) + return +} + +func TestComposeIssueCommentMessage(t *testing.T) { + doer, _, issue, comment := prepareMailerTest(t) stpl := texttmpl.Must(texttmpl.New("issue/comment").Parse(subjectTpl)) btpl := template.Must(template.New("issue/comment").Parse(bodyTpl)) InitMailRender(stpl, btpl) - tos := []string{"test@gitea.com", "test2@gitea.com"} + recipients := []*models.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(&mailCommentContext{Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, - Content: "test body", Comment: comment}, "en-US", tos, false, "issue comment") + Content: "test body", Comment: comment}, "en-US", recipients, false, "issue comment") assert.NoError(t, err) assert.Len(t, msgs, 2) gomailMsg := msgs[0].ToMessage() @@ -76,25 +82,15 @@ func TestComposeIssueCommentMessage(t *testing.T) { } func TestComposeIssueMessage(t *testing.T) { - assert.NoError(t, models.PrepareTestDatabase()) - var mailService = setting.Mailer{ - From: "test@gitea.com", - } - - 
setting.MailService = &mailService - setting.Domain = "localhost" - - doer := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) - repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1, Owner: doer}).(*models.Repository) - issue := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) + doer, _, issue, _ := prepareMailerTest(t) stpl := texttmpl.Must(texttmpl.New("issue/new").Parse(subjectTpl)) btpl := template.Must(template.New("issue/new").Parse(bodyTpl)) InitMailRender(stpl, btpl) - tos := []string{"test@gitea.com", "test2@gitea.com"} + recipients := []*models.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(&mailCommentContext{Issue: issue, Doer: doer, ActionType: models.ActionCreateIssue, - Content: "test body"}, "en-US", tos, false, "issue create") + Content: "test body"}, "en-US", recipients, false, "issue create") assert.NoError(t, err) assert.Len(t, msgs, 2) @@ -111,18 +107,8 @@ func TestComposeIssueMessage(t *testing.T) { } func TestTemplateSelection(t *testing.T) { - assert.NoError(t, models.PrepareTestDatabase()) - var mailService = setting.Mailer{ - From: "test@gitea.com", - } - - setting.MailService = &mailService - setting.Domain = "localhost" - - doer := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) - repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1, Owner: doer}).(*models.Repository) - issue := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) - tos := []string{"test@gitea.com"} + doer, repo, issue, comment := prepareMailerTest(t) + recipients := []*models.User{{Name: "Test", Email: "test@gitea.com"}} stpl := texttmpl.Must(texttmpl.New("issue/default").Parse("issue/default/subject")) texttmpl.Must(stpl.New("issue/new").Parse("issue/new/subject")) @@ -146,38 +132,26 @@ func TestTemplateSelection(t *testing.T) { } msg := testComposeIssueCommentMessage(t, &mailCommentContext{Issue: issue, Doer: doer, ActionType: models.ActionCreateIssue, - Content: "test body"}, tos, false, "TestTemplateSelection") + Content: "test body"}, recipients, false, "TestTemplateSelection") expect(t, msg, "issue/new/subject", "issue/new/body") - comment := models.AssertExistsAndLoadBean(t, &models.Comment{ID: 2, Issue: issue}).(*models.Comment) msg = testComposeIssueCommentMessage(t, &mailCommentContext{Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, - Content: "test body", Comment: comment}, tos, false, "TestTemplateSelection") + Content: "test body", Comment: comment}, recipients, false, "TestTemplateSelection") expect(t, msg, "issue/default/subject", "issue/default/body") pull := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 2, Repo: repo, Poster: doer}).(*models.Issue) comment = models.AssertExistsAndLoadBean(t, &models.Comment{ID: 4, Issue: pull}).(*models.Comment) msg = testComposeIssueCommentMessage(t, &mailCommentContext{Issue: pull, Doer: doer, ActionType: models.ActionCommentPull, - Content: "test body", Comment: comment}, tos, false, "TestTemplateSelection") + Content: "test body", Comment: comment}, recipients, false, "TestTemplateSelection") expect(t, msg, "pull/comment/subject", "pull/comment/body") msg = testComposeIssueCommentMessage(t, &mailCommentContext{Issue: issue, Doer: doer, ActionType: models.ActionCloseIssue, - Content: "test body", Comment: comment}, tos, false, "TestTemplateSelection") + Content: "test body", 
Comment: comment}, recipients, false, "TestTemplateSelection") expect(t, msg, "Re: [user2/repo1] issue1 (#1)", "issue/close/body") } func TestTemplateServices(t *testing.T) { - assert.NoError(t, models.PrepareTestDatabase()) - var mailService = setting.Mailer{ - From: "test@gitea.com", - } - - setting.MailService = &mailService - setting.Domain = "localhost" - - doer := models.AssertExistsAndLoadBean(t, &models.User{ID: 2}).(*models.User) - repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1, Owner: doer}).(*models.Repository) - issue := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) - comment := models.AssertExistsAndLoadBean(t, &models.Comment{ID: 2, Issue: issue}).(*models.Comment) + doer, _, issue, comment := prepareMailerTest(t) assert.NoError(t, issue.LoadRepo()) expect := func(t *testing.T, issue *models.Issue, comment *models.Comment, doer *models.User, @@ -187,9 +161,9 @@ func TestTemplateServices(t *testing.T) { btpl := template.Must(template.New("issue/default").Parse(tplBody)) InitMailRender(stpl, btpl) - tos := []string{"test@gitea.com"} + recipients := []*models.User{{Name: "Test", Email: "test@gitea.com"}} msg := testComposeIssueCommentMessage(t, &mailCommentContext{Issue: issue, Doer: doer, ActionType: actionType, - Content: "test body", Comment: comment}, tos, fromMention, "TestTemplateServices") + Content: "test body", Comment: comment}, recipients, fromMention, "TestTemplateServices") subject := msg.ToMessage().GetHeader("Subject") msgbuf := new(bytes.Buffer) @@ -219,9 +193,38 @@ func TestTemplateServices(t *testing.T) { "//Re: //") } -func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, tos []string, fromMention bool, info string) *Message { - msgs, err := composeIssueCommentMessages(ctx, "en-US", tos, fromMention, info) +func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, recipients []*models.User, fromMention bool, info string) *Message { + msgs, err := composeIssueCommentMessages(ctx, "en-US", recipients, fromMention, info) assert.NoError(t, err) assert.Len(t, msgs, 1) return msgs[0] } + +func TestGenerateAdditionalHeaders(t *testing.T) { + doer, _, issue, _ := prepareMailerTest(t) + + ctx := &mailCommentContext{Issue: issue, Doer: doer} + recipient := &models.User{Name: "Test", Email: "test@gitea.com"} + + headers := generateAdditionalHeaders(ctx, "dummy-reason", recipient) + + expected := map[string]string{ + "List-ID": "user2/repo1 ", + "List-Archive": "", + "X-Gitea-Reason": "dummy-reason", + "X-Gitea-Sender": "< Ur Tw ><", + "X-Gitea-Recipient": "Test", + "X-Gitea-Recipient-Address": "test@gitea.com", + "X-Gitea-Repository": "repo1", + "X-Gitea-Repository-Path": "user2/repo1", + "X-Gitea-Repository-Link": "https://try.gitea.io/user2/repo1", + "X-Gitea-Issue-ID": "1", + "X-Gitea-Issue-Link": "https://try.gitea.io/user2/repo1/issues/1", + } + + for key, value := range expected { + if assert.Contains(t, headers, key) { + assert.Equal(t, value, headers[key]) + } + } +} diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go index 6b86734bf845..fae8d473e341 100644 --- a/services/mailer/mailer.go +++ b/services/mailer/mailer.go @@ -304,7 +304,7 @@ var Sender gomail.Sender // NewContext start mail queue service func NewContext() { // Need to check if mailQueue is nil because in during reinstall (user had installed - // before but swithed install lock off), this function will be called again + // before but switched install lock off), this function 
will be called again // while mail queue is already processing tasks, and produces a race condition. if setting.MailService == nil || mailQueue != nil { return diff --git a/services/mirror/mirror.go b/services/mirror/mirror.go index 9e2dde85fca2..1e30c919e6d4 100644 --- a/services/mirror/mirror.go +++ b/services/mirror/mirror.go @@ -7,593 +7,97 @@ package mirror import ( "context" "fmt" - "net/url" "strconv" "strings" - "time" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/modules/cache" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/graceful" - "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/notification" - repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/sync" - "code.gitea.io/gitea/modules/timeutil" - "code.gitea.io/gitea/modules/util" ) // mirrorQueue holds an UniqueQueue object of the mirror var mirrorQueue = sync.NewUniqueQueue(setting.Repository.MirrorQueueLength) -func readAddress(m *models.Mirror) { - if len(m.Address) > 0 { - return - } - var err error - m.Address, err = remoteAddress(m.Repo.RepoPath()) - if err != nil { - log.Error("remoteAddress: %v", err) - } -} - -func remoteAddress(repoPath string) (string, error) { - var cmd *git.Command - err := git.LoadGitVersion() - if err != nil { - return "", err - } - if git.CheckGitVersionAtLeast("2.7") == nil { - cmd = git.NewCommand("remote", "get-url", "origin") - } else { - cmd = git.NewCommand("config", "--get", "remote.origin.url") - } - - result, err := cmd.RunInDir(repoPath) - if err != nil { - if strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { - return "", nil - } - return "", err - } - if len(result) > 0 { - return result[:len(result)-1], nil - } - return "", nil -} - -// sanitizeOutput sanitizes output of a command, replacing occurrences of the -// repository's remote address with a sanitized version. -func sanitizeOutput(output, repoPath string) (string, error) { - remoteAddr, err := remoteAddress(repoPath) - if err != nil { - // if we're unable to load the remote address, then we're unable to - // sanitize. - return "", err - } - return util.SanitizeMessage(output, remoteAddr), nil -} - -// AddressNoCredentials returns mirror address from Git repository config without credentials. 
-func AddressNoCredentials(m *models.Mirror) string { - readAddress(m) - u, err := url.Parse(m.Address) - if err != nil { - // this shouldn't happen but just return it unsanitised - return m.Address - } - u.User = nil - return u.String() -} - -// UpdateAddress writes new address to Git repository and database -func UpdateAddress(m *models.Mirror, addr string) error { - repoPath := m.Repo.RepoPath() - // Remove old origin - _, err := git.NewCommand("remote", "rm", "origin").RunInDir(repoPath) - if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { - return err - } - - _, err = git.NewCommand("remote", "add", "origin", "--mirror=fetch", addr).RunInDir(repoPath) - if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { - return err - } - - if m.Repo.HasWiki() { - wikiPath := m.Repo.WikiPath() - wikiRemotePath := repo_module.WikiRemoteURL(addr) - // Remove old origin of wiki - _, err := git.NewCommand("remote", "rm", "origin").RunInDir(wikiPath) - if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { - return err - } - - _, err = git.NewCommand("remote", "add", "origin", "--mirror=fetch", wikiRemotePath).RunInDir(wikiPath) - if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { - return err - } - } - - m.Repo.OriginalURL = addr - return models.UpdateRepositoryCols(m.Repo, "original_url") -} - -// gitShortEmptySha Git short empty SHA -const gitShortEmptySha = "0000000" - -// mirrorSyncResult contains information of a updated reference. -// If the oldCommitID is "0000000", it means a new reference, the value of newCommitID is empty. -// If the newCommitID is "0000000", it means the reference is deleted, the value of oldCommitID is empty. -type mirrorSyncResult struct { - refName string - oldCommitID string - newCommitID string -} - -// parseRemoteUpdateOutput detects create, update and delete operations of references from upstream. 
-func parseRemoteUpdateOutput(output string) []*mirrorSyncResult { - results := make([]*mirrorSyncResult, 0, 3) - lines := strings.Split(output, "\n") - for i := range lines { - // Make sure reference name is presented before continue - idx := strings.Index(lines[i], "-> ") - if idx == -1 { - continue - } - - refName := lines[i][idx+3:] - - switch { - case strings.HasPrefix(lines[i], " * "): // New reference - if strings.HasPrefix(lines[i], " * [new tag]") { - refName = git.TagPrefix + refName - } else if strings.HasPrefix(lines[i], " * [new branch]") { - refName = git.BranchPrefix + refName - } - results = append(results, &mirrorSyncResult{ - refName: refName, - oldCommitID: gitShortEmptySha, - }) - case strings.HasPrefix(lines[i], " - "): // Delete reference - results = append(results, &mirrorSyncResult{ - refName: refName, - newCommitID: gitShortEmptySha, - }) - case strings.HasPrefix(lines[i], " + "): // Force update - if idx := strings.Index(refName, " "); idx > -1 { - refName = refName[:idx] - } - delimIdx := strings.Index(lines[i][3:], " ") - if delimIdx == -1 { - log.Error("SHA delimiter not found: %q", lines[i]) - continue - } - shas := strings.Split(lines[i][3:delimIdx+3], "...") - if len(shas) != 2 { - log.Error("Expect two SHAs but not what found: %q", lines[i]) - continue - } - results = append(results, &mirrorSyncResult{ - refName: refName, - oldCommitID: shas[0], - newCommitID: shas[1], - }) - case strings.HasPrefix(lines[i], " "): // New commits of a reference - delimIdx := strings.Index(lines[i][3:], " ") - if delimIdx == -1 { - log.Error("SHA delimiter not found: %q", lines[i]) - continue - } - shas := strings.Split(lines[i][3:delimIdx+3], "..") - if len(shas) != 2 { - log.Error("Expect two SHAs but not what found: %q", lines[i]) - continue - } - results = append(results, &mirrorSyncResult{ - refName: refName, - oldCommitID: shas[0], - newCommitID: shas[1], - }) - - default: - log.Warn("parseRemoteUpdateOutput: unexpected update line %q", lines[i]) - } - } - return results -} - -// runSync returns true if sync finished without error. -func runSync(ctx context.Context, m *models.Mirror) ([]*mirrorSyncResult, bool) { - repoPath := m.Repo.RepoPath() - wikiPath := m.Repo.WikiPath() - timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second - - log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo) - gitArgs := []string{"remote", "update"} - if m.EnablePrune { - gitArgs = append(gitArgs, "--prune") - } - - stdoutBuilder := strings.Builder{} - stderrBuilder := strings.Builder{} - if err := git.NewCommand(gitArgs...). - SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())). 
- RunInDirTimeoutPipeline(timeout, repoPath, &stdoutBuilder, &stderrBuilder); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() - // sanitize the output, since it may contain the remote address, which may - // contain a password - stderrMessage, sanitizeErr := sanitizeOutput(stderr, repoPath) - if sanitizeErr != nil { - log.Error("sanitizeOutput failed on stderr: %v", sanitizeErr) - log.Error("Failed to update mirror repository %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdout, stderr, err) - return nil, false - } - stdoutMessage, err := sanitizeOutput(stdout, repoPath) - if err != nil { - log.Error("sanitizeOutput failed: %v", sanitizeErr) - log.Error("Failed to update mirror repository %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdout, stderrMessage, err) - return nil, false - } - - log.Error("Failed to update mirror repository %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) - desc := fmt.Sprintf("Failed to update mirror repository '%s': %s", repoPath, stderrMessage) - if err = models.CreateRepositoryNotice(desc); err != nil { - log.Error("CreateRepositoryNotice: %v", err) - } - return nil, false - } - output := stderrBuilder.String() - - gitRepo, err := git.OpenRepository(repoPath) - if err != nil { - log.Error("OpenRepository: %v", err) - return nil, false - } - defer gitRepo.Close() - - log.Trace("SyncMirrors [repo: %-v]: syncing releases with tags...", m.Repo) - if err = repo_module.SyncReleasesWithTags(m.Repo, gitRepo); err != nil { - log.Error("Failed to synchronize tags to releases for repository: %v", err) - } - - if m.LFS && setting.LFS.StartServer { - log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) - readAddress(m) - ep := lfs.DetermineEndpoint(m.Address, m.LFSEndpoint) - if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, ep); err != nil { - log.Error("Failed to synchronize LFS objects for repository: %v", err) - } - } - - log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo) - if err := m.Repo.UpdateSize(models.DefaultDBContext()); err != nil { - log.Error("Failed to update size for mirror repository: %v", err) - } - - if m.Repo.HasWiki() { - log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo) - stderrBuilder.Reset() - stdoutBuilder.Reset() - if err := git.NewCommand("remote", "update", "--prune"). - SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())). 
- RunInDirTimeoutPipeline(timeout, wikiPath, &stdoutBuilder, &stderrBuilder); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() - // sanitize the output, since it may contain the remote address, which may - // contain a password - stderrMessage, sanitizeErr := sanitizeOutput(stderr, repoPath) - if sanitizeErr != nil { - log.Error("sanitizeOutput failed on stderr: %v", sanitizeErr) - log.Error("Failed to update mirror repository wiki %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdout, stderr, err) - return nil, false - } - stdoutMessage, err := sanitizeOutput(stdout, repoPath) - if err != nil { - log.Error("sanitizeOutput failed: %v", sanitizeErr) - log.Error("Failed to update mirror repository wiki %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdout, stderrMessage, err) - return nil, false - } - - log.Error("Failed to update mirror repository wiki %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) - desc := fmt.Sprintf("Failed to update mirror repository wiki '%s': %s", wikiPath, stderrMessage) - if err = models.CreateRepositoryNotice(desc); err != nil { - log.Error("CreateRepositoryNotice: %v", err) - } - return nil, false - } - log.Trace("SyncMirrors [repo: %-v Wiki]: git remote update complete", m.Repo) - } - - log.Trace("SyncMirrors [repo: %-v]: invalidating mirror branch caches...", m.Repo) - branches, _, err := repo_module.GetBranches(m.Repo, 0, 0) - if err != nil { - log.Error("GetBranches: %v", err) - return nil, false - } - - for _, branch := range branches { - cache.Remove(m.Repo.GetCommitsCountCacheKey(branch.Name, true)) - } - - m.UpdatedUnix = timeutil.TimeStampNow() - return parseRemoteUpdateOutput(output), true -} - -// Address returns mirror address from Git repository config without credentials. -func Address(m *models.Mirror) string { - readAddress(m) - return util.SanitizeURLCredentials(m.Address, false) -} - -// Username returns the mirror address username -func Username(m *models.Mirror) string { - readAddress(m) - u, err := url.Parse(m.Address) - if err != nil { - // this shouldn't happen but if it does return "" - return "" - } - return u.User.Username() -} - -// Password returns the mirror address password -func Password(m *models.Mirror) string { - readAddress(m) - u, err := url.Parse(m.Address) - if err != nil { - // this shouldn't happen but if it does return "" - return "" - } - password, _ := u.User.Password() - return password -} - // Update checks and updates mirror repositories. 
func Update(ctx context.Context) error { log.Trace("Doing: Update") - if err := models.MirrorsIterate(func(idx int, bean interface{}) error { - m := bean.(*models.Mirror) - if m.Repo == nil { - log.Error("Disconnected mirror repository found: %d", m.ID) + + handler := func(idx int, bean interface{}) error { + var item string + if m, ok := bean.(*models.Mirror); ok { + if m.Repo == nil { + log.Error("Disconnected mirror found: %d", m.ID) + return nil + } + item = fmt.Sprintf("pull %d", m.RepoID) + } else if m, ok := bean.(*models.PushMirror); ok { + if m.Repo == nil { + log.Error("Disconnected push-mirror found: %d", m.ID) + return nil + } + item = fmt.Sprintf("push %d", m.ID) + } else { + log.Error("Unknown bean: %v", bean) return nil } + select { case <-ctx.Done(): return fmt.Errorf("Aborted") default: - mirrorQueue.Add(m.RepoID) + mirrorQueue.Add(item) return nil } - }); err != nil { - log.Trace("Update: %v", err) + } + + if err := models.MirrorsIterate(handler); err != nil { + log.Error("MirrorsIterate: %v", err) + return err + } + if err := models.PushMirrorsIterate(handler); err != nil { + log.Error("PushMirrorsIterate: %v", err) return err } log.Trace("Finished: Update") return nil } -// SyncMirrors checks and syncs mirrors. +// syncMirrors checks and syncs mirrors. // FIXME: graceful: this should be a persistable queue -func SyncMirrors(ctx context.Context) { +func syncMirrors(ctx context.Context) { // Start listening on new sync requests. for { select { case <-ctx.Done(): mirrorQueue.Close() return - case repoID := <-mirrorQueue.Queue(): - syncMirror(ctx, repoID) - } - } -} - -func syncMirror(ctx context.Context, repoID string) { - log.Trace("SyncMirrors [repo_id: %v]", repoID) - defer func() { - err := recover() - if err == nil { - return - } - // There was a panic whilst syncMirrors... - log.Error("PANIC whilst syncMirrors[%s] Panic: %v\nStacktrace: %s", repoID, err, log.Stack(2)) - }() - mirrorQueue.Remove(repoID) - - id, _ := strconv.ParseInt(repoID, 10, 64) - m, err := models.GetMirrorByRepoID(id) - if err != nil { - log.Error("GetMirrorByRepoID [%s]: %v", repoID, err) - return - } - - log.Trace("SyncMirrors [repo: %-v]: Running Sync", m.Repo) - results, ok := runSync(ctx, m) - if !ok { - return - } - - log.Trace("SyncMirrors [repo: %-v]: Scheduling next update", m.Repo) - m.ScheduleNextUpdate() - if err = models.UpdateMirror(m); err != nil { - log.Error("UpdateMirror [%s]: %v", repoID, err) - return - } - - var gitRepo *git.Repository - if len(results) == 0 { - log.Trace("SyncMirrors [repo: %-v]: no branches updated", m.Repo) - } else { - log.Trace("SyncMirrors [repo: %-v]: %d branches updated", m.Repo, len(results)) - gitRepo, err = git.OpenRepository(m.Repo.RepoPath()) - if err != nil { - log.Error("OpenRepository [%d]: %v", m.RepoID, err) - return - } - defer gitRepo.Close() - - if ok := checkAndUpdateEmptyRepository(m, gitRepo, results); !ok { - return - } - } - - for _, result := range results { - // Discard GitHub pull requests, i.e. 
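The queue items introduced here are plain strings of the form "pull <repoID>" or "push <mirrorID>"; both prefixes happen to be four characters long, which is what lets the consumer slice item[5:]. An equivalent, prefix-length-agnostic decode could look like the sketch below (an illustration, not the code from this change).

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// decodeQueueItem splits a mirror queue item back into its kind ("pull" or
// "push") and numeric ID without relying on a fixed prefix length.
func decodeQueueItem(item string) (string, int64, error) {
	parts := strings.SplitN(item, " ", 2)
	if len(parts) != 2 {
		return "", 0, fmt.Errorf("malformed queue item: %q", item)
	}
	id, err := strconv.ParseInt(parts[1], 10, 64)
	return parts[0], id, err
}

func main() {
	item := fmt.Sprintf("pull %d", 42) // same encoding Update and StartToMirror use
	kind, id, err := decodeQueueItem(item)
	fmt.Println(kind, id, err) // pull 42 <nil>
}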
refs/pull/* - if strings.HasPrefix(result.refName, "refs/pull/") { - continue - } - - tp, _ := git.SplitRefName(result.refName) - - // Create reference - if result.oldCommitID == gitShortEmptySha { - if tp == git.TagPrefix { - tp = "tag" - } else if tp == git.BranchPrefix { - tp = "branch" + case item := <-mirrorQueue.Queue(): + id, _ := strconv.ParseInt(item[5:], 10, 64) + if strings.HasPrefix(item, "pull") { + _ = SyncPullMirror(ctx, id) + } else if strings.HasPrefix(item, "push") { + _ = SyncPushMirror(ctx, id) + } else { + log.Error("Unknown item in queue: %v", item) } - commitID, err := gitRepo.GetRefCommitID(result.refName) - if err != nil { - log.Error("gitRepo.GetRefCommitID [repo_id: %s, ref_name: %s]: %v", m.RepoID, result.refName, err) - continue - } - notification.NotifySyncPushCommits(m.Repo.MustOwner(), m.Repo, &repo_module.PushUpdateOptions{ - RefFullName: result.refName, - OldCommitID: git.EmptySHA, - NewCommitID: commitID, - }, repo_module.NewPushCommits()) - notification.NotifySyncCreateRef(m.Repo.MustOwner(), m.Repo, tp, result.refName) - continue - } - - // Delete reference - if result.newCommitID == gitShortEmptySha { - notification.NotifySyncDeleteRef(m.Repo.MustOwner(), m.Repo, tp, result.refName) - continue - } - - // Push commits - oldCommitID, err := git.GetFullCommitID(gitRepo.Path, result.oldCommitID) - if err != nil { - log.Error("GetFullCommitID [%d]: %v", m.RepoID, err) - continue - } - newCommitID, err := git.GetFullCommitID(gitRepo.Path, result.newCommitID) - if err != nil { - log.Error("GetFullCommitID [%d]: %v", m.RepoID, err) - continue - } - commits, err := gitRepo.CommitsBetweenIDs(newCommitID, oldCommitID) - if err != nil { - log.Error("CommitsBetweenIDs [repo_id: %d, new_commit_id: %s, old_commit_id: %s]: %v", m.RepoID, newCommitID, oldCommitID, err) - continue + mirrorQueue.Remove(item) } - - theCommits := repo_module.ListToPushCommits(commits) - if len(theCommits.Commits) > setting.UI.FeedMaxCommitNum { - theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum] - } - - theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID) - - notification.NotifySyncPushCommits(m.Repo.MustOwner(), m.Repo, &repo_module.PushUpdateOptions{ - RefFullName: result.refName, - OldCommitID: oldCommitID, - NewCommitID: newCommitID, - }, theCommits) - } - log.Trace("SyncMirrors [repo: %-v]: done notifying updated branches/tags - now updating last commit time", m.Repo) - - // Get latest commit date and update to current repository updated time - commitDate, err := git.GetLatestCommitTime(m.Repo.RepoPath()) - if err != nil { - log.Error("GetLatestCommitDate [%d]: %v", m.RepoID, err) - return - } - - if err = models.UpdateRepositoryUpdatedTime(m.RepoID, commitDate); err != nil { - log.Error("Update repository 'updated_unix' [%d]: %v", m.RepoID, err) - return } - - log.Trace("SyncMirrors [repo: %-v]: Successfully updated", m.Repo) -} - -func checkAndUpdateEmptyRepository(m *models.Mirror, gitRepo *git.Repository, results []*mirrorSyncResult) bool { - if !m.Repo.IsEmpty { - return true - } - - hasDefault := false - hasMaster := false - hasMain := false - defaultBranchName := m.Repo.DefaultBranch - if len(defaultBranchName) == 0 { - defaultBranchName = setting.Repository.DefaultBranch - } - firstName := "" - for _, result := range results { - if strings.HasPrefix(result.refName, "refs/pull/") { - continue - } - tp, name := git.SplitRefName(result.refName) - if len(tp) > 0 && tp != git.BranchPrefix { - continue - } - if len(firstName) == 0 { - 
firstName = name - } - - hasDefault = hasDefault || name == defaultBranchName - hasMaster = hasMaster || name == "master" - hasMain = hasMain || name == "main" - } - - if len(firstName) > 0 { - if hasDefault { - m.Repo.DefaultBranch = defaultBranchName - } else if hasMaster { - m.Repo.DefaultBranch = "master" - } else if hasMain { - m.Repo.DefaultBranch = "main" - } else { - m.Repo.DefaultBranch = firstName - } - // Update the git repository default branch - if err := gitRepo.SetDefaultBranch(m.Repo.DefaultBranch); err != nil { - if !git.IsErrUnsupportedVersion(err) { - log.Error("Failed to update default branch of underlying git repository %-v. Error: %v", m.Repo, err) - desc := fmt.Sprintf("Failed to uupdate default branch of underlying git repository '%s': %v", m.Repo.RepoPath(), err) - if err = models.CreateRepositoryNotice(desc); err != nil { - log.Error("CreateRepositoryNotice: %v", err) - } - return false - } - } - m.Repo.IsEmpty = false - // Update the is empty and default_branch columns - if err := models.UpdateRepositoryCols(m.Repo, "default_branch", "is_empty"); err != nil { - log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err) - desc := fmt.Sprintf("Failed to uupdate default branch of repository '%s': %v", m.Repo.RepoPath(), err) - if err = models.CreateRepositoryNotice(desc); err != nil { - log.Error("CreateRepositoryNotice: %v", err) - } - return false - } - } - return true } // InitSyncMirrors initializes a go routine to sync the mirrors func InitSyncMirrors() { - go graceful.GetManager().RunWithShutdownContext(SyncMirrors) + go graceful.GetManager().RunWithShutdownContext(syncMirrors) } // StartToMirror adds repoID to mirror queue func StartToMirror(repoID int64) { - go mirrorQueue.Add(repoID) + go mirrorQueue.Add(fmt.Sprintf("pull %d", repoID)) +} + +// AddPushMirrorToQueue adds the push mirror to the queue +func AddPushMirrorToQueue(mirrorID int64) { + go mirrorQueue.Add(fmt.Sprintf("push %d", mirrorID)) } diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go new file mode 100644 index 000000000000..a16724b36fef --- /dev/null +++ b/services/mirror/mirror_pull.go @@ -0,0 +1,452 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package mirror + +import ( + "context" + "fmt" + "strings" + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/lfs" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/notification" + repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" +) + +// gitShortEmptySha Git short empty SHA +const gitShortEmptySha = "0000000" + +// UpdateAddress writes new address to Git repository and database +func UpdateAddress(m *models.Mirror, addr string) error { + remoteName := m.GetRemoteName() + repoPath := m.Repo.RepoPath() + // Remove old remote + _, err := git.NewCommand("remote", "rm", remoteName).RunInDir(repoPath) + if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { + return err + } + + _, err = git.NewCommand("remote", "add", remoteName, "--mirror=fetch", addr).RunInDir(repoPath) + if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { + return err + } + + if m.Repo.HasWiki() { + wikiPath := m.Repo.WikiPath() + wikiRemotePath := repo_module.WikiRemoteURL(addr) + // Remove old remote of wiki + _, err := git.NewCommand("remote", "rm", remoteName).RunInDir(wikiPath) + if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { + return err + } + + _, err = git.NewCommand("remote", "add", remoteName, "--mirror=fetch", wikiRemotePath).RunInDir(wikiPath) + if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { + return err + } + } + + m.Repo.OriginalURL = addr + return models.UpdateRepositoryCols(m.Repo, "original_url") +} + +// mirrorSyncResult contains information of a updated reference. +// If the oldCommitID is "0000000", it means a new reference, the value of newCommitID is empty. +// If the newCommitID is "0000000", it means the reference is deleted, the value of oldCommitID is empty. +type mirrorSyncResult struct { + refName string + oldCommitID string + newCommitID string +} + +// parseRemoteUpdateOutput detects create, update and delete operations of references from upstream. 
+func parseRemoteUpdateOutput(output string) []*mirrorSyncResult { + results := make([]*mirrorSyncResult, 0, 3) + lines := strings.Split(output, "\n") + for i := range lines { + // Make sure the reference name is present before continuing + idx := strings.Index(lines[i], "-> ") + if idx == -1 { + continue + } + + refName := lines[i][idx+3:] + + switch { + case strings.HasPrefix(lines[i], " * "): // New reference + if strings.HasPrefix(lines[i], " * [new tag]") { + refName = git.TagPrefix + refName + } else if strings.HasPrefix(lines[i], " * [new branch]") { + refName = git.BranchPrefix + refName + } + results = append(results, &mirrorSyncResult{ + refName: refName, + oldCommitID: gitShortEmptySha, + }) + case strings.HasPrefix(lines[i], " - "): // Delete reference + results = append(results, &mirrorSyncResult{ + refName: refName, + newCommitID: gitShortEmptySha, + }) + case strings.HasPrefix(lines[i], " + "): // Force update + if idx := strings.Index(refName, " "); idx > -1 { + refName = refName[:idx] + } + delimIdx := strings.Index(lines[i][3:], " ") + if delimIdx == -1 { + log.Error("SHA delimiter not found: %q", lines[i]) + continue + } + shas := strings.Split(lines[i][3:delimIdx+3], "...") + if len(shas) != 2 { + log.Error("Expected two SHAs but found: %q", lines[i]) + continue + } + results = append(results, &mirrorSyncResult{ + refName: refName, + oldCommitID: shas[0], + newCommitID: shas[1], + }) + case strings.HasPrefix(lines[i], " "): // New commits of a reference + delimIdx := strings.Index(lines[i][3:], " ") + if delimIdx == -1 { + log.Error("SHA delimiter not found: %q", lines[i]) + continue + } + shas := strings.Split(lines[i][3:delimIdx+3], "..") + if len(shas) != 2 { + log.Error("Expected two SHAs but found: %q", lines[i]) + continue + } + results = append(results, &mirrorSyncResult{ + refName: refName, + oldCommitID: shas[0], + newCommitID: shas[1], + }) + + default: + log.Warn("parseRemoteUpdateOutput: unexpected update line %q", lines[i]) + } + } + return results +} + +// runSync runs the sync and returns the parsed results; the boolean is true if the sync finished without error. +func runSync(ctx context.Context, m *models.Mirror) ([]*mirrorSyncResult, bool) { + repoPath := m.Repo.RepoPath() + wikiPath := m.Repo.WikiPath() + timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second + + log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo) + gitArgs := []string{"remote", "update"} + if m.EnablePrune { + gitArgs = append(gitArgs, "--prune") + } + gitArgs = append(gitArgs, m.GetRemoteName()) + + remoteAddr, remoteErr := git.GetRemoteAddress(repoPath, m.GetRemoteName()) + if remoteErr != nil { + log.Error("GetRemoteAddress Error %v", remoteErr) + } + + stdoutBuilder := strings.Builder{} + stderrBuilder := strings.Builder{} + if err := git.NewCommand(gitArgs...). + SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())).
+ RunInDirTimeoutPipeline(timeout, repoPath, &stdoutBuilder, &stderrBuilder); err != nil { + stdout := stdoutBuilder.String() + stderr := stderrBuilder.String() + + // sanitize the output, since it may contain the remote address, which may + // contain a password + + sanitizer := util.NewURLSanitizer(remoteAddr, true) + stderrMessage := sanitizer.Replace(stderr) + stdoutMessage := sanitizer.Replace(stdout) + + log.Error("Failed to update mirror repository %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) + desc := fmt.Sprintf("Failed to update mirror repository '%s': %s", repoPath, stderrMessage) + if err = models.CreateRepositoryNotice(desc); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + return nil, false + } + output := stderrBuilder.String() + + gitRepo, err := git.OpenRepository(repoPath) + if err != nil { + log.Error("OpenRepository: %v", err) + return nil, false + } + + log.Trace("SyncMirrors [repo: %-v]: syncing releases with tags...", m.Repo) + if err = repo_module.SyncReleasesWithTags(m.Repo, gitRepo); err != nil { + log.Error("Failed to synchronize tags to releases for repository: %v", err) + } + + if m.LFS && setting.LFS.StartServer { + log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) + ep := lfs.DetermineEndpoint(remoteAddr.String(), m.LFSEndpoint) + if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, ep); err != nil { + log.Error("Failed to synchronize LFS objects for repository: %v", err) + } + } + gitRepo.Close() + + log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo) + if err := m.Repo.UpdateSize(models.DefaultDBContext()); err != nil { + log.Error("Failed to update size for mirror repository: %v", err) + } + + if m.Repo.HasWiki() { + log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo) + stderrBuilder.Reset() + stdoutBuilder.Reset() + if err := git.NewCommand("remote", "update", "--prune", m.GetRemoteName()). + SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())). 
+ RunInDirTimeoutPipeline(timeout, wikiPath, &stdoutBuilder, &stderrBuilder); err != nil { + stdout := stdoutBuilder.String() + stderr := stderrBuilder.String() + + // sanitize the output, since it may contain the remote address, which may + // contain a password + + remoteAddr, remoteErr := git.GetRemoteAddress(wikiPath, m.GetRemoteName()) + if remoteErr != nil { + log.Error("GetRemoteAddress Error %v", remoteErr) + } + + sanitizer := util.NewURLSanitizer(remoteAddr, true) + stderrMessage := sanitizer.Replace(stderr) + stdoutMessage := sanitizer.Replace(stdout) + + log.Error("Failed to update mirror repository wiki %v:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) + desc := fmt.Sprintf("Failed to update mirror repository wiki '%s': %s", wikiPath, stderrMessage) + if err = models.CreateRepositoryNotice(desc); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + return nil, false + } + log.Trace("SyncMirrors [repo: %-v Wiki]: git remote update complete", m.Repo) + } + + log.Trace("SyncMirrors [repo: %-v]: invalidating mirror branch caches...", m.Repo) + branches, _, err := repo_module.GetBranches(m.Repo, 0, 0) + if err != nil { + log.Error("GetBranches: %v", err) + return nil, false + } + + for _, branch := range branches { + cache.Remove(m.Repo.GetCommitsCountCacheKey(branch.Name, true)) + } + + m.UpdatedUnix = timeutil.TimeStampNow() + return parseRemoteUpdateOutput(output), true +} + +// SyncPullMirror starts the sync of the pull mirror and schedules the next run. +func SyncPullMirror(ctx context.Context, repoID int64) bool { + log.Trace("SyncMirrors [repo_id: %v]", repoID) + defer func() { + err := recover() + if err == nil { + return + } + // There was a panic whilst syncMirrors... + log.Error("PANIC whilst syncMirrors[%d] Panic: %v\nStacktrace: %s", repoID, err, log.Stack(2)) + }() + + m, err := models.GetMirrorByRepoID(repoID) + if err != nil { + log.Error("GetMirrorByRepoID [%d]: %v", repoID, err) + return false + } + + log.Trace("SyncMirrors [repo: %-v]: Running Sync", m.Repo) + results, ok := runSync(ctx, m) + if !ok { + return false + } + + log.Trace("SyncMirrors [repo: %-v]: Scheduling next update", m.Repo) + m.ScheduleNextUpdate() + if err = models.UpdateMirror(m); err != nil { + log.Error("UpdateMirror [%d]: %v", m.RepoID, err) + return false + } + + var gitRepo *git.Repository + if len(results) == 0 { + log.Trace("SyncMirrors [repo: %-v]: no branches updated", m.Repo) + } else { + log.Trace("SyncMirrors [repo: %-v]: %d branches updated", m.Repo, len(results)) + gitRepo, err = git.OpenRepository(m.Repo.RepoPath()) + if err != nil { + log.Error("OpenRepository [%d]: %v", m.RepoID, err) + return false + } + defer gitRepo.Close() + + if ok := checkAndUpdateEmptyRepository(m, gitRepo, results); !ok { + return false + } + } + + for _, result := range results { + // Discard GitHub pull requests, i.e. 
refs/pull/* + if strings.HasPrefix(result.refName, "refs/pull/") { + continue + } + + tp, _ := git.SplitRefName(result.refName) + + // Create reference + if result.oldCommitID == gitShortEmptySha { + if tp == git.TagPrefix { + tp = "tag" + } else if tp == git.BranchPrefix { + tp = "branch" + } + commitID, err := gitRepo.GetRefCommitID(result.refName) + if err != nil { + log.Error("gitRepo.GetRefCommitID [repo_id: %d, ref_name: %s]: %v", m.RepoID, result.refName, err) + continue + } + notification.NotifySyncPushCommits(m.Repo.MustOwner(), m.Repo, &repo_module.PushUpdateOptions{ + RefFullName: result.refName, + OldCommitID: git.EmptySHA, + NewCommitID: commitID, + }, repo_module.NewPushCommits()) + notification.NotifySyncCreateRef(m.Repo.MustOwner(), m.Repo, tp, result.refName) + continue + } + + // Delete reference + if result.newCommitID == gitShortEmptySha { + notification.NotifySyncDeleteRef(m.Repo.MustOwner(), m.Repo, tp, result.refName) + continue + } + + // Push commits + oldCommitID, err := git.GetFullCommitID(gitRepo.Path, result.oldCommitID) + if err != nil { + log.Error("GetFullCommitID [%d]: %v", m.RepoID, err) + continue + } + newCommitID, err := git.GetFullCommitID(gitRepo.Path, result.newCommitID) + if err != nil { + log.Error("GetFullCommitID [%d]: %v", m.RepoID, err) + continue + } + commits, err := gitRepo.CommitsBetweenIDs(newCommitID, oldCommitID) + if err != nil { + log.Error("CommitsBetweenIDs [repo_id: %d, new_commit_id: %s, old_commit_id: %s]: %v", m.RepoID, newCommitID, oldCommitID, err) + continue + } + + theCommits := repo_module.ListToPushCommits(commits) + if len(theCommits.Commits) > setting.UI.FeedMaxCommitNum { + theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum] + } + + theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID) + + notification.NotifySyncPushCommits(m.Repo.MustOwner(), m.Repo, &repo_module.PushUpdateOptions{ + RefFullName: result.refName, + OldCommitID: oldCommitID, + NewCommitID: newCommitID, + }, theCommits) + } + log.Trace("SyncMirrors [repo: %-v]: done notifying updated branches/tags - now updating last commit time", m.Repo) + + // Get latest commit date and update to current repository updated time + commitDate, err := git.GetLatestCommitTime(m.Repo.RepoPath()) + if err != nil { + log.Error("GetLatestCommitDate [%d]: %v", m.RepoID, err) + return false + } + + if err = models.UpdateRepositoryUpdatedTime(m.RepoID, commitDate); err != nil { + log.Error("Update repository 'updated_unix' [%d]: %v", m.RepoID, err) + return false + } + + log.Trace("SyncMirrors [repo: %-v]: Successfully updated", m.Repo) + + return true +} + +func checkAndUpdateEmptyRepository(m *models.Mirror, gitRepo *git.Repository, results []*mirrorSyncResult) bool { + if !m.Repo.IsEmpty { + return true + } + + hasDefault := false + hasMaster := false + hasMain := false + defaultBranchName := m.Repo.DefaultBranch + if len(defaultBranchName) == 0 { + defaultBranchName = setting.Repository.DefaultBranch + } + firstName := "" + for _, result := range results { + if strings.HasPrefix(result.refName, "refs/pull/") { + continue + } + tp, name := git.SplitRefName(result.refName) + if len(tp) > 0 && tp != git.BranchPrefix { + continue + } + if len(firstName) == 0 { + firstName = name + } + + hasDefault = hasDefault || name == defaultBranchName + hasMaster = hasMaster || name == "master" + hasMain = hasMain || name == "main" + } + + if len(firstName) > 0 { + if hasDefault { + m.Repo.DefaultBranch = defaultBranchName + } else if hasMaster { + 
m.Repo.DefaultBranch = "master" + } else if hasMain { + m.Repo.DefaultBranch = "main" + } else { + m.Repo.DefaultBranch = firstName + } + // Update the git repository default branch + if err := gitRepo.SetDefaultBranch(m.Repo.DefaultBranch); err != nil { + if !git.IsErrUnsupportedVersion(err) { + log.Error("Failed to update default branch of underlying git repository %-v. Error: %v", m.Repo, err) + desc := fmt.Sprintf("Failed to update default branch of underlying git repository '%s': %v", m.Repo.RepoPath(), err) + if err = models.CreateRepositoryNotice(desc); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + return false + } + } + m.Repo.IsEmpty = false + // Update the is_empty and default_branch columns + if err := models.UpdateRepositoryCols(m.Repo, "default_branch", "is_empty"); err != nil { + log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err) + desc := fmt.Sprintf("Failed to update default branch of repository '%s': %v", m.Repo.RepoPath(), err) + if err = models.CreateRepositoryNotice(desc); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + return false + } + } + return true +} diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go new file mode 100644 index 000000000000..de813036894b --- /dev/null +++ b/services/mirror/mirror_push.go @@ -0,0 +1,242 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package mirror + +import ( + "context" + "errors" + "io" + "net/url" + "regexp" + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/lfs" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" +) + +var stripExitStatus = regexp.MustCompile(`exit status \d+ - `) + +// AddPushMirrorRemote registers the push mirror remote. +func AddPushMirrorRemote(m *models.PushMirror, addr string) error { + addRemoteAndConfig := func(addr, path string) error { + if _, err := git.NewCommand("remote", "add", "--mirror=push", m.RemoteName, addr).RunInDir(path); err != nil { + return err + } + if _, err := git.NewCommand("config", "--add", "remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunInDir(path); err != nil { + return err + } + if _, err := git.NewCommand("config", "--add", "remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunInDir(path); err != nil { + return err + } + return nil + } + + if err := addRemoteAndConfig(addr, m.Repo.RepoPath()); err != nil { + return err + } + + if m.Repo.HasWiki() { + wikiRemoteURL := repository.WikiRemoteURL(addr) + if len(wikiRemoteURL) > 0 { + if err := addRemoteAndConfig(wikiRemoteURL, m.Repo.WikiPath()); err != nil { + return err + } + } + } + + return nil +} + +// RemovePushMirrorRemote removes the push mirror remote. +func RemovePushMirrorRemote(m *models.PushMirror) error { + cmd := git.NewCommand("remote", "rm", m.RemoteName) + + if _, err := cmd.RunInDir(m.Repo.RepoPath()); err != nil { + return err + } + + if m.Repo.HasWiki() { + if _, err := cmd.RunInDir(m.Repo.WikiPath()); err != nil { + // The wiki remote may not exist + log.Warn("Wiki Remote[%d] could not be removed: %v", m.ID, err) + } + } + + return nil +} + +// SyncPushMirror starts the sync of the push mirror.
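+// It returns true if the sync completed without error; in either case the mirror's LastError and +// LastUpdateUnix fields are updated and persisted via models.UpdatePushMirror.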
+func SyncPushMirror(ctx context.Context, mirrorID int64) bool { + log.Trace("SyncPushMirror [mirror: %d]", mirrorID) + defer func() { + err := recover() + if err == nil { + return + } + // There was a panic whilst syncPushMirror... + log.Error("PANIC whilst syncPushMirror[%d] Panic: %v\nStacktrace: %s", mirrorID, err, log.Stack(2)) + }() + + m, err := models.GetPushMirrorByID(mirrorID) + if err != nil { + log.Error("GetPushMirrorByID [%d]: %v", mirrorID, err) + return false + } + + m.LastError = "" + + log.Trace("SyncPushMirror [mirror: %d][repo: %-v]: Running Sync", m.ID, m.Repo) + err = runPushSync(ctx, m) + if err != nil { + log.Error("SyncPushMirror [mirror: %d][repo: %-v]: %v", m.ID, m.Repo, err) + m.LastError = stripExitStatus.ReplaceAllLiteralString(err.Error(), "") + } + + m.LastUpdateUnix = timeutil.TimeStampNow() + + if err := models.UpdatePushMirror(m); err != nil { + log.Error("UpdatePushMirror [%d]: %v", m.ID, err) + + return false + } + + log.Trace("SyncPushMirror [mirror: %d][repo: %-v]: Finished", m.ID, m.Repo) + + return err == nil +} + +func runPushSync(ctx context.Context, m *models.PushMirror) error { + timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second + + performPush := func(path string) error { + remoteAddr, err := git.GetRemoteAddress(path, m.RemoteName) + if err != nil { + log.Error("GetRemoteAddress(%s) Error %v", path, err) + return errors.New("Unexpected error") + } + + if setting.LFS.StartServer { + log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) + + gitRepo, err := git.OpenRepository(path) + if err != nil { + log.Error("OpenRepository: %v", err) + return errors.New("Unexpected error") + } + defer gitRepo.Close() + + ep := lfs.DetermineEndpoint(remoteAddr.String(), "") + if err := pushAllLFSObjects(ctx, gitRepo, ep); err != nil { + return util.NewURLSanitizedError(err, remoteAddr, true) + } + } + + log.Trace("Pushing %s mirror[%d] remote %s", path, m.ID, m.RemoteName) + + if err := git.Push(path, git.PushOptions{ + Remote: m.RemoteName, + Force: true, + Mirror: true, + Timeout: timeout, + }); err != nil { + log.Error("Error pushing %s mirror[%d] remote %s: %v", path, m.ID, m.RemoteName, err) + + return util.NewURLSanitizedError(err, remoteAddr, true) + } + + return nil + } + + err := performPush(m.Repo.RepoPath()) + if err != nil { + return err + } + + if m.Repo.HasWiki() { + wikiPath := m.Repo.WikiPath() + _, err := git.GetRemoteAddress(wikiPath, m.RemoteName) + if err == nil { + err := performPush(wikiPath) + if err != nil { + return err + } + } else { + log.Trace("Skipping wiki: No remote configured") + } + } + + return nil +} + +func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, endpoint *url.URL) error { + client := lfs.NewClient(endpoint) + contentStore := lfs.NewContentStore() + + pointerChan := make(chan lfs.PointerBlob) + errChan := make(chan error, 1) + go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan) + + uploadObjects := func(pointers []lfs.Pointer) error { + err := client.Upload(ctx, pointers, func(p lfs.Pointer, objectError error) (io.ReadCloser, error) { + if objectError != nil { + return nil, objectError + } + + content, err := contentStore.Get(p) + if err != nil { + log.Error("Error reading LFS object %v: %v", p, err) + } + return content, err + }) + if err != nil { + select { + case <-ctx.Done(): + return nil + default: + } + } + return err + } + + var batch []lfs.Pointer + for pointerBlob := range pointerChan { + exists, err := contentStore.Exists(pointerBlob.Pointer) + if 
err != nil { + log.Error("Error checking if LFS object %v exists: %v", pointerBlob.Pointer, err) + return err + } + if !exists { + log.Trace("Skipping missing LFS object %v", pointerBlob.Pointer) + continue + } + + batch = append(batch, pointerBlob.Pointer) + if len(batch) >= client.BatchSize() { + if err := uploadObjects(batch); err != nil { + return err + } + batch = nil + } + } + if len(batch) > 0 { + if err := uploadObjects(batch); err != nil { + return err + } + } + + err, has := <-errChan + if has { + log.Error("Error enumerating LFS objects for repository: %v", err) + return err + } + + return nil +} diff --git a/services/pull/check.go b/services/pull/check.go index 3ec76de5e874..9db1654cfbc0 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -28,21 +28,19 @@ var prQueue queue.UniqueQueue // AddToTaskQueue adds itself to pull request test task queue. func AddToTaskQueue(pr *models.PullRequest) { - go func() { - err := prQueue.PushFunc(strconv.FormatInt(pr.ID, 10), func() error { - pr.Status = models.PullRequestStatusChecking - err := pr.UpdateColsIfNotMerged("status") - if err != nil { - log.Error("AddToTaskQueue.UpdateCols[%d].(add to queue): %v", pr.ID, err) - } else { - log.Trace("Adding PR ID: %d to the test pull requests queue", pr.ID) - } - return err - }) - if err != nil && err != queue.ErrAlreadyInQueue { - log.Error("Error adding prID %d to the test pull requests queue: %v", pr.ID, err) + err := prQueue.PushFunc(strconv.FormatInt(pr.ID, 10), func() error { + pr.Status = models.PullRequestStatusChecking + err := pr.UpdateColsIfNotMerged("status") + if err != nil { + log.Error("AddToTaskQueue.UpdateCols[%d].(add to queue): %v", pr.ID, err) + } else { + log.Trace("Adding PR ID: %d to the test pull requests queue", pr.ID) } - }() + return err + }) + if err != nil && err != queue.ErrAlreadyInQueue { + log.Error("Error adding prID %d to the test pull requests queue: %v", pr.ID, err) + } } // checkAndUpdateStatus checks if pull request is possible to leaving checking status, diff --git a/services/pull/check_test.go b/services/pull/check_test.go index 33a230e5ab86..f6614ea0ad27 100644 --- a/services/pull/check_test.go +++ b/services/pull/check_test.go @@ -6,7 +6,6 @@ package pull import ( - "context" "strconv" "testing" "time" @@ -54,9 +53,9 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { assert.True(t, has) assert.NoError(t, err) - prQueue.Run(func(_ context.Context, shutdown func()) { + prQueue.Run(func(shutdown func()) { queueShutdown = append(queueShutdown, shutdown) - }, func(_ context.Context, terminate func()) { + }, func(terminate func()) { queueTerminate = append(queueTerminate, terminate) }) diff --git a/services/pull/pull.go b/services/pull/pull.go index 153a75094d73..6b3acd200425 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -9,6 +9,7 @@ import ( "bytes" "context" "fmt" + "regexp" "strings" "time" @@ -302,7 +303,11 @@ func AddTestPullRequestTask(doer *models.User, repoID int64, branch string, isSy for _, pr := range prs { divergence, err := GetDiverging(pr) if err != nil { - log.Error("GetDiverging: %v", err) + if models.IsErrBranchDoesNotExist(err) && !git.IsBranchExist(pr.HeadRepo.RepoPath(), pr.HeadBranch) { + log.Warn("Cannot test PR %s/%d: head_branch %s no longer exists", pr.BaseRepo.Name, pr.IssueID, pr.HeadBranch) + } else { + log.Error("GetDiverging: %v", err) + } } else { err = pr.UpdateCommitDivergence(divergence.Ahead, divergence.Behind) if err != nil { @@ -389,6 +394,10 @@ func checkIfPRContentChanged(pr 
*models.PullRequest, oldCommitID, newCommitID st // corresponding branches of base repository. // FIXME: Only push branches that are actually updates? func PushToBaseRepo(pr *models.PullRequest) (err error) { + return pushToBaseRepoHelper(pr, "") +} + +func pushToBaseRepoHelper(pr *models.PullRequest, prefixHeadBranch string) (err error) { log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName()) if err := pr.LoadHeadRepo(); err != nil { @@ -414,7 +423,7 @@ func PushToBaseRepo(pr *models.PullRequest) (err error) { if err := git.Push(headRepoPath, git.PushOptions{ Remote: baseRepoPath, - Branch: pr.HeadBranch + ":" + gitRefName, + Branch: prefixHeadBranch + pr.HeadBranch + ":" + gitRefName, Force: true, // Use InternalPushingEnvironment here because we know that pre-receive and post-receive do not run on a refs/pulls/... Env: models.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo), @@ -427,6 +436,14 @@ func PushToBaseRepo(pr *models.PullRequest) (err error) { rejectErr := err.(*git.ErrPushRejected) log.Info("Unable to push PR head for %s#%d (%-v:%s) due to rejection:\nStdout: %s\nStderr: %s\nError: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err) return err + } else if git.IsErrMoreThanOne(err) { + if prefixHeadBranch != "" { + log.Info("Can't push with %s%s", prefixHeadBranch, pr.HeadBranch) + return err + } + log.Info("Retrying to push with refs/heads/%s", pr.HeadBranch) + err = pushToBaseRepoHelper(pr, "refs/heads/") + return err } log.Error("Unable to push PR head for %s#%d (%-v:%s) due to Error: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err) return fmt.Errorf("Push: %s:%s %s:%s %v", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), gitRefName, err) @@ -516,6 +533,8 @@ func CloseRepoBranchesPulls(doer *models.User, repo *models.Repository) error { return nil } +var commitMessageTrailersPattern = regexp.MustCompile(`(?:^|\n\n)(?:[\w-]+[ \t]*:[^\n]+\n*(?:[ \t]+[^\n]+\n*)*)+$`) + // GetSquashMergeCommitMessages returns the commit messages between head and merge base (if there is one) func GetSquashMergeCommitMessages(pr *models.PullRequest) string { if err := pr.LoadIssue(); err != nil { @@ -570,16 +589,47 @@ func GetSquashMergeCommitMessages(pr *models.PullRequest) string { authors := make([]string, 0, list.Len()) stringBuilder := strings.Builder{} - stringBuilder.WriteString(pr.Issue.Content) - if stringBuilder.Len() > 0 { - stringBuilder.WriteRune('\n') - stringBuilder.WriteRune('\n') + if !setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages { + message := strings.TrimSpace(pr.Issue.Content) + stringBuilder.WriteString(message) + if stringBuilder.Len() > 0 { + stringBuilder.WriteRune('\n') + if !commitMessageTrailersPattern.MatchString(message) { + stringBuilder.WriteRune('\n') + } + } } // commits list is in reverse chronological order element := list.Back() for element != nil { commit := element.Value.(*git.Commit) + + if setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages { + maxSize := setting.Repository.PullRequest.DefaultMergeMessageSize + if maxSize < 0 || stringBuilder.Len() < maxSize { + var toWrite []byte + if element == list.Back() { + toWrite = []byte(strings.TrimPrefix(commit.CommitMessage, pr.Issue.Title)) + } else { + toWrite = []byte(commit.CommitMessage) + } + + if len(toWrite) > maxSize-stringBuilder.Len() && maxSize > -1 { + toWrite = append(toWrite[:maxSize-stringBuilder.Len()], 
"..."...) + } + if _, err := stringBuilder.Write(toWrite); err != nil { + log.Error("Unable to write commit message Error: %v", err) + return "" + } + + if _, err := stringBuilder.WriteRune('\n'); err != nil { + log.Error("Unable to write commit message Error: %v", err) + return "" + } + } + } + authorString := commit.Author.String() if !authorsMap[authorString] && authorString != posterSig { authors = append(authors, authorString) @@ -617,13 +667,6 @@ func GetSquashMergeCommitMessages(pr *models.PullRequest) string { } } - if len(authors) > 0 { - if _, err := stringBuilder.WriteRune('\n'); err != nil { - log.Error("Unable to write to string builder Error: %v", err) - return "" - } - } - for _, author := range authors { if _, err := stringBuilder.Write([]byte("Co-authored-by: ")); err != nil { log.Error("Unable to write to string builder Error: %v", err) @@ -677,7 +720,8 @@ func GetIssuesLastCommitStatus(issues models.IssueList) (map[int64]*models.Commi status, err := getLastCommitStatus(gitRepo, issue.PullRequest) if err != nil { - return nil, err + log.Error("getLastCommitStatus: cant get last commit of pull [%d]: %v", issue.PullRequest.ID, err) + continue } res[issue.PullRequest.ID] = status } diff --git a/services/pull/pull_test.go b/services/pull/pull_test.go index 64920e355096..81627ebb77bc 100644 --- a/services/pull/pull_test.go +++ b/services/pull/pull_test.go @@ -5,4 +5,27 @@ package pull +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + // TODO TestPullRequest_PushToBaseRepo + +func TestPullRequest_CommitMessageTrailersPattern(t *testing.T) { + // Not a valid trailer section + assert.False(t, commitMessageTrailersPattern.MatchString("")) + assert.False(t, commitMessageTrailersPattern.MatchString("No trailer.")) + assert.False(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob \nNot a trailer due to following text.")) + assert.False(t, commitMessageTrailersPattern.MatchString("Message body not correctly separated from trailer section by empty line.\nSigned-off-by: Bob ")) + // Valid trailer section + assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob ")) + assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob \nOther-Trailer: Value")) + assert.True(t, commitMessageTrailersPattern.MatchString("Message body correctly separated from trailer section by empty line.\n\nSigned-off-by: Bob ")) + assert.True(t, commitMessageTrailersPattern.MatchString("Multiple trailers.\n\nSigned-off-by: Bob \nOther-Trailer: Value")) + assert.True(t, commitMessageTrailersPattern.MatchString("Newline after trailer section.\n\nSigned-off-by: Bob \n")) + assert.True(t, commitMessageTrailersPattern.MatchString("No space after colon is accepted.\n\nSigned-off-by:Bob ")) + assert.True(t, commitMessageTrailersPattern.MatchString("Additional whitespace is accepted.\n\nSigned-off-by \t : \tBob ")) + assert.True(t, commitMessageTrailersPattern.MatchString("Folded value.\n\nFolded-trailer: This is\n a folded\n trailer value\nOther-Trailer: Value")) +} diff --git a/services/pull/review.go b/services/pull/review.go index 4b647722fcb2..b07e21fad977 100644 --- a/services/pull/review.go +++ b/services/pull/review.go @@ -100,7 +100,7 @@ func CreateCodeComment(doer *models.User, gitRepo *git.Repository, issue *models if !isReview && !existsReview { // Submit the review we've just created so the comment shows up in the issue view - if _, _, err = SubmitReview(doer, gitRepo, issue, models.ReviewTypeComment, "", latestCommitID); err != nil { + if _, 
_, err = SubmitReview(doer, gitRepo, issue, models.ReviewTypeComment, "", latestCommitID, nil); err != nil { return nil, err } } @@ -215,7 +215,7 @@ func createCodeComment(doer *models.User, repo *models.Repository, issue *models } // SubmitReview creates a review out of the existing pending review or creates a new one if no pending review exist -func SubmitReview(doer *models.User, gitRepo *git.Repository, issue *models.Issue, reviewType models.ReviewType, content, commitID string) (*models.Review, *models.Comment, error) { +func SubmitReview(doer *models.User, gitRepo *git.Repository, issue *models.Issue, reviewType models.ReviewType, content, commitID string, attachmentUUIDs []string) (*models.Review, *models.Comment, error) { pr, err := issue.GetPullRequest() if err != nil { return nil, nil, err @@ -240,7 +240,7 @@ func SubmitReview(doer *models.User, gitRepo *git.Repository, issue *models.Issu } } - review, comm, err := models.SubmitReview(doer, issue, reviewType, content, commitID, stale) + review, comm, err := models.SubmitReview(doer, issue, reviewType, content, commitID, stale, attachmentUUIDs) if err != nil { return nil, nil, err } diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 45cd10b65bbe..19b488790a0c 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -141,10 +141,15 @@ func createTemporaryRepo(pr *models.PullRequest) (string, error) { trackingBranch := "tracking" // Fetch head branch if err := git.NewCommand("fetch", "--no-tags", remoteRepoName, git.BranchPrefix+pr.HeadBranch+":"+trackingBranch).RunInDirPipeline(tmpBasePath, &outbuf, &errbuf); err != nil { - log.Error("Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, outbuf.String(), errbuf.String()) if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } + if !git.IsBranchExist(pr.HeadRepo.RepoPath(), pr.HeadBranch) { + return "", models.ErrBranchDoesNotExist{ + BranchName: pr.HeadBranch, + } + } + log.Error("Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, err, outbuf.String(), errbuf.String()) } outbuf.Reset() diff --git a/services/pull/update.go b/services/pull/update.go index f4f7859a49ec..f35e47cbf820 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -88,7 +88,9 @@ func GetDiverging(pr *models.PullRequest) (*git.DivergeObject, error) { tmpRepo, err := createTemporaryRepo(pr) if err != nil { - log.Error("CreateTemporaryPath: %v", err) + if !models.IsErrBranchDoesNotExist(err) { + log.Error("CreateTemporaryRepo: %v", err) + } return nil, err } defer func() { diff --git a/services/release/release.go b/services/release/release.go index 9d201edf6d28..6f5aa02c85d8 100644 --- a/services/release/release.go +++ b/services/release/release.go @@ -23,6 +23,25 @@ func createTag(gitRepo *git.Repository, rel *models.Release, msg string) (bool, // Only actual create when publish. 
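+	// When the tag does not exist yet, the publisher must also be allowed to create it under the repository's protected tag rules (checked below); otherwise ErrProtectedTagName is returned.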
if !rel.IsDraft { if !gitRepo.IsTagExist(rel.TagName) { + if err := rel.LoadAttributes(); err != nil { + log.Error("LoadAttributes: %v", err) + return false, err + } + + protectedTags, err := rel.Repo.GetProtectedTags() + if err != nil { + return false, fmt.Errorf("GetProtectedTags: %v", err) + } + isAllowed, err := models.IsUserAllowedToControlTag(protectedTags, rel.TagName, rel.PublisherID) + if err != nil { + return false, err + } + if !isAllowed { + return false, models.ErrProtectedTagName{ + TagName: rel.TagName, + } + } + commit, err := gitRepo.GetCommit(rel.Target) if err != nil { return false, fmt.Errorf("GetCommit: %v", err) @@ -49,11 +68,7 @@ func createTag(gitRepo *git.Repository, rel *models.Release, msg string) (bool, } created = true rel.LowerTagName = strings.ToLower(rel.TagName) - // Prepare Notify - if err := rel.LoadAttributes(); err != nil { - log.Error("LoadAttributes: %v", err) - return false, err - } + notification.NotifyPushCommits( rel.Publisher, rel.Repo, &repository.PushUpdateOptions{ @@ -137,7 +152,9 @@ func CreateNewTag(doer *models.User, repo *models.Repository, commit, tagName, m rel := &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: doer.ID, + Publisher: doer, TagName: tagName, Target: commit, IsDraft: false, diff --git a/services/release/release_test.go b/services/release/release_test.go index 102e3d7e0c0e..9f665fabab6f 100644 --- a/services/release/release_test.go +++ b/services/release/release_test.go @@ -33,7 +33,9 @@ func TestRelease_Create(t *testing.T) { assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1", Target: "master", Title: "v0.1 is released", @@ -45,7 +47,9 @@ func TestRelease_Create(t *testing.T) { assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1.1", Target: "65f1bf27bc3bf70f64657658635e66094edbcb4d", Title: "v0.1.1 is released", @@ -57,7 +61,9 @@ func TestRelease_Create(t *testing.T) { assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1.2", Target: "65f1bf2", Title: "v0.1.2 is released", @@ -69,7 +75,9 @@ func TestRelease_Create(t *testing.T) { assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1.3", Target: "65f1bf2", Title: "v0.1.3 is released", @@ -81,7 +89,9 @@ func TestRelease_Create(t *testing.T) { assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1.4", Target: "65f1bf2", Title: "v0.1.4 is released", @@ -99,7 +109,9 @@ func TestRelease_Create(t *testing.T) { var release = models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v0.1.5", Target: "65f1bf2", Title: "v0.1.5 is released", @@ -125,7 +137,9 @@ func TestRelease_Update(t *testing.T) { // Test a changed release assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v1.1.1", Target: "master", Title: "v1.1.1 is released", @@ -147,7 +161,9 @@ func TestRelease_Update(t *testing.T) { // Test a changed draft assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v1.2.1", Target: "65f1bf2", Title: "v1.2.1 is 
draft", @@ -169,7 +185,9 @@ func TestRelease_Update(t *testing.T) { // Test a changed pre-release assert.NoError(t, CreateRelease(gitRepo, &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v1.3.1", Target: "65f1bf2", Title: "v1.3.1 is pre-released", @@ -192,7 +210,9 @@ func TestRelease_Update(t *testing.T) { // Test create release release = &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v1.1.2", Target: "master", Title: "v1.1.2 is released", @@ -221,7 +241,7 @@ func TestRelease_Update(t *testing.T) { assert.NoError(t, UpdateRelease(user, gitRepo, release, []string{attach.UUID}, nil, nil)) assert.NoError(t, models.GetReleaseAttachments(release)) - assert.EqualValues(t, 1, len(release.Attachments)) + assert.Len(t, release.Attachments, 1) assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) assert.EqualValues(t, attach.Name, release.Attachments[0].Name) @@ -232,7 +252,7 @@ func TestRelease_Update(t *testing.T) { })) release.Attachments = nil assert.NoError(t, models.GetReleaseAttachments(release)) - assert.EqualValues(t, 1, len(release.Attachments)) + assert.Len(t, release.Attachments, 1) assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) assert.EqualValues(t, "test2.txt", release.Attachments[0].Name) @@ -241,7 +261,7 @@ func TestRelease_Update(t *testing.T) { assert.NoError(t, UpdateRelease(user, gitRepo, release, nil, []string{attach.UUID}, nil)) release.Attachments = nil assert.NoError(t, models.GetReleaseAttachments(release)) - assert.EqualValues(t, 0, len(release.Attachments)) + assert.Empty(t, release.Attachments) } func TestRelease_createTag(t *testing.T) { @@ -258,7 +278,9 @@ func TestRelease_createTag(t *testing.T) { // Test a changed release release := &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v2.1.1", Target: "master", Title: "v2.1.1 is released", @@ -280,7 +302,9 @@ func TestRelease_createTag(t *testing.T) { // Test a changed draft release = &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v2.2.1", Target: "65f1bf2", Title: "v2.2.1 is draft", @@ -301,7 +325,9 @@ func TestRelease_createTag(t *testing.T) { // Test a changed pre-release release = &models.Release{ RepoID: repo.ID, + Repo: repo, PublisherID: user.ID, + Publisher: user, TagName: "v2.3.1", Target: "65f1bf2", Title: "v2.3.1 is pre-released", diff --git a/services/repository/branch.go b/services/repository/branch.go new file mode 100644 index 000000000000..28d24f121d06 --- /dev/null +++ b/services/repository/branch.go @@ -0,0 +1,72 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package repository + +import ( + "errors" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" + pull_service "code.gitea.io/gitea/services/pull" +) + +// enumerates all branch-related errors +var ( + ErrBranchIsDefault = errors.New("branch is default") + ErrBranchIsProtected = errors.New("branch is protected") +) + +// DeleteBranch deletes a repository branch +func DeleteBranch(doer *models.User, repo *models.Repository, gitRepo *git.Repository, branchName string) error { + if branchName == repo.DefaultBranch { + return ErrBranchIsDefault + } + + isProtected, err := repo.IsProtectedBranch(branchName) + if err != nil { + return err + } + + if isProtected { + return ErrBranchIsProtected + } + + commit, err := gitRepo.GetBranchCommit(branchName) + if err != nil { + return err + } + + if err := gitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{ + Force: true, + }); err != nil { + return err + } + + if err := pull_service.CloseBranchPulls(doer, repo.ID, branchName); err != nil { + return err + } + + // Don't return errors below this point + if err := PushUpdate( + &repo_module.PushUpdateOptions{ + RefFullName: git.BranchPrefix + branchName, + OldCommitID: commit.ID.String(), + NewCommitID: git.EmptySHA, + PusherID: doer.ID, + PusherName: doer.Name, + RepoUserName: repo.OwnerName, + RepoName: repo.Name, + }); err != nil { + log.Error("Update: %v", err) + } + + if err := repo.AddDeletedBranch(branchName, commit.ID.String(), doer.ID); err != nil { + log.Warn("AddDeletedBranch: %v", err) + } + + return nil +} diff --git a/services/repository/generate.go b/services/repository/generate.go index 067f8f61d0a0..43cbb45a41f0 100644 --- a/services/repository/generate.go +++ b/services/repository/generate.go @@ -13,6 +13,12 @@ import ( // GenerateRepository generates a repository from a template func GenerateRepository(doer, owner *models.User, templateRepo *models.Repository, opts models.GenerateRepoOptions) (_ *models.Repository, err error) { + if !doer.IsAdmin && !owner.CanCreateRepo() { + return nil, models.ErrReachLimitOfRepo{ + Limit: owner.MaxRepoCreation, + } + } + + var generateRepo *models.Repository if err = models.WithTx(func(ctx models.DBContext) error { generateRepo, err = repo_module.GenerateRepository(ctx, doer, owner, templateRepo, opts) diff --git a/services/repository/push.go b/services/repository/push.go index bed5c575fe81..26df6b8e45e6 100644 --- a/services/repository/push.go +++ b/services/repository/push.go @@ -95,7 +95,6 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { if opts.IsNewRef() && opts.IsDelRef() { return fmt.Errorf("Old and new revisions are both %s", git.EmptySHA) } - var commits = &repo_module.PushCommits{} if opts.IsTag() { // If is tag reference if pusher == nil || pusher.ID != opts.PusherID { var err error @@ -192,23 +191,25 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { } } - commits = repo_module.ListToPushCommits(l) + commits := repo_module.ListToPushCommits(l) + commits.HeadCommit = repo_module.CommitToPushCommit(newCommit) + + if err := repofiles.UpdateIssuesCommit(pusher, repo, commits.Commits, refName); err != nil { + log.Error("updateIssuesCommit: %v", err) + } + if len(commits.Commits) > setting.UI.FeedMaxCommitNum { commits.Commits = commits.Commits[:setting.UI.FeedMaxCommitNum] } commits.CompareURL = repo.ComposeCompareURL(opts.OldCommitID, opts.NewCommitID) notification.NotifyPushCommits(pusher, repo, opts, commits)
- if err := repofiles.UpdateIssuesCommit(pusher, repo, commits.Commits, refName); err != nil { - log.Error("updateIssuesCommit: %v", err) - } - if err = models.RemoveDeletedBranch(repo.ID, branch); err != nil { log.Error("models.RemoveDeletedBranch %s/%s failed: %v", repo.ID, branch, err) } // Cache for big repository - if err := repo_module.CacheRef(repo, gitRepo, opts.RefFullName); err != nil { + if err := repo_module.CacheRef(graceful.GetManager().HammerContext(), repo, gitRepo, opts.RefFullName); err != nil { log.Error("repo_module.CacheRef %s/%s failed: %v", repo.ID, branch, err) } } else { diff --git a/services/repository/transfer.go b/services/repository/transfer.go index ec769190bdb4..bb323c1c0a23 100644 --- a/services/repository/transfer.go +++ b/services/repository/transfer.go @@ -94,6 +94,20 @@ func StartRepositoryTransfer(doer, newOwner *models.User, repo *models.Repositor } } + // In case the new owner would not have sufficient access to the repo, give access rights for read + hasAccess, err := models.HasAccess(newOwner.ID, repo) + if err != nil { + return err + } + if !hasAccess { + if err := repo.AddCollaborator(newOwner); err != nil { + return err + } + if err := repo.ChangeCollaborationAccessMode(newOwner.ID, models.AccessModeRead); err != nil { + return err + } + } + // Make repo as pending for transfer repo.Status = models.RepositoryPendingTransfer if err := models.CreatePendingRepositoryTransfer(doer, newOwner, repo.ID, teams); err != nil { diff --git a/services/repository/transfer_test.go b/services/repository/transfer_test.go index 052b8c995428..c92844674c6d 100644 --- a/services/repository/transfer_test.go +++ b/services/repository/transfer_test.go @@ -52,3 +52,24 @@ func TestTransferOwnership(t *testing.T) { models.CheckConsistencyFor(t, &models.Repository{}, &models.User{}, &models.Team{}) } + +func TestStartRepositoryTransferSetPermission(t *testing.T) { + assert.NoError(t, models.PrepareTestDatabase()) + + doer := models.AssertExistsAndLoadBean(t, &models.User{ID: 3}).(*models.User) + recipient := models.AssertExistsAndLoadBean(t, &models.User{ID: 5}).(*models.User) + repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 3}).(*models.Repository) + repo.Owner = models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User) + + hasAccess, err := models.HasAccess(recipient.ID, repo) + assert.NoError(t, err) + assert.False(t, hasAccess) + + assert.NoError(t, StartRepositoryTransfer(doer, recipient, repo, nil)) + + hasAccess, err = models.HasAccess(recipient.ID, repo) + assert.NoError(t, err) + assert.True(t, hasAccess) + + models.CheckConsistencyFor(t, &models.Repository{}, &models.User{}, &models.Team{}) +} diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go index a417a9e846d4..8243fde1bb74 100644 --- a/services/webhook/deliver.go +++ b/services/webhook/deliver.go @@ -6,8 +6,13 @@ package webhook import ( "context" + "crypto/hmac" + "crypto/sha1" + "crypto/sha256" "crypto/tls" + "encoding/hex" "fmt" + "io" "io/ioutil" "net" "net/http" @@ -26,27 +31,32 @@ import ( // Deliver deliver hook task func Deliver(t *models.HookTask) error { + w, err := models.GetWebhookByID(t.HookID) + if err != nil { + return err + } + defer func() { err := recover() if err == nil { return } // There was a panic whilst delivering a hook... 
- log.Error("PANIC whilst trying to deliver webhook[%d] for repo[%d] to %s Panic: %v\nStacktrace: %s", t.ID, t.RepoID, t.URL, err, log.Stack(2)) + log.Error("PANIC whilst trying to deliver webhook[%d] for repo[%d] to %s Panic: %v\nStacktrace: %s", t.ID, t.RepoID, w.URL, err, log.Stack(2)) }() + t.IsDelivered = true var req *http.Request - var err error - switch t.HTTPMethod { + switch w.HTTPMethod { case "": log.Info("HTTP Method for webhook %d empty, setting to POST as default", t.ID) fallthrough case http.MethodPost: - switch t.ContentType { + switch w.ContentType { case models.ContentTypeJSON: - req, err = http.NewRequest("POST", t.URL, strings.NewReader(t.PayloadContent)) + req, err = http.NewRequest("POST", w.URL, strings.NewReader(t.PayloadContent)) if err != nil { return err } @@ -57,16 +67,15 @@ func Deliver(t *models.HookTask) error { "payload": []string{t.PayloadContent}, } - req, err = http.NewRequest("POST", t.URL, strings.NewReader(forms.Encode())) + req, err = http.NewRequest("POST", w.URL, strings.NewReader(forms.Encode())) if err != nil { - return err } req.Header.Set("Content-Type", "application/x-www-form-urlencoded") } case http.MethodGet: - u, err := url.Parse(t.URL) + u, err := url.Parse(w.URL) if err != nil { return err } @@ -78,31 +87,48 @@ func Deliver(t *models.HookTask) error { return err } case http.MethodPut: - switch t.Typ { + switch w.Type { case models.MATRIX: - req, err = getMatrixHookRequest(t) + req, err = getMatrixHookRequest(w, t) if err != nil { return err } default: - return fmt.Errorf("Invalid http method for webhook: [%d] %v", t.ID, t.HTTPMethod) + return fmt.Errorf("Invalid http method for webhook: [%d] %v", t.ID, w.HTTPMethod) } default: - return fmt.Errorf("Invalid http method for webhook: [%d] %v", t.ID, t.HTTPMethod) + return fmt.Errorf("Invalid http method for webhook: [%d] %v", t.ID, w.HTTPMethod) + } + + var signatureSHA1 string + var signatureSHA256 string + if len(w.Secret) > 0 { + sig1 := hmac.New(sha1.New, []byte(w.Secret)) + sig256 := hmac.New(sha256.New, []byte(w.Secret)) + _, err = io.MultiWriter(sig1, sig256).Write([]byte(t.PayloadContent)) + if err != nil { + log.Error("prepareWebhooks.sigWrite: %v", err) + } + signatureSHA1 = hex.EncodeToString(sig1.Sum(nil)) + signatureSHA256 = hex.EncodeToString(sig256.Sum(nil)) } req.Header.Add("X-Gitea-Delivery", t.UUID) req.Header.Add("X-Gitea-Event", t.EventType.Event()) - req.Header.Add("X-Gitea-Signature", t.Signature) + req.Header.Add("X-Gitea-Signature", signatureSHA256) req.Header.Add("X-Gogs-Delivery", t.UUID) req.Header.Add("X-Gogs-Event", t.EventType.Event()) - req.Header.Add("X-Gogs-Signature", t.Signature) + req.Header.Add("X-Gogs-Signature", signatureSHA256) + req.Header.Add("X-Hub-Signature", "sha1="+signatureSHA1) + req.Header.Add("X-Hub-Signature-256", "sha256="+signatureSHA256) req.Header["X-GitHub-Delivery"] = []string{t.UUID} req.Header["X-GitHub-Event"] = []string{t.EventType.Event()} // Record delivery information. t.RequestInfo = &models.HookRequest{ - Headers: map[string]string{}, + URL: req.URL.String(), + HTTPMethod: req.Method, + Headers: map[string]string{}, } for k, vals := range req.Header { t.RequestInfo.Headers[k] = strings.Join(vals, ",") @@ -125,11 +151,6 @@ func Deliver(t *models.HookTask) error { } // Update webhook last delivery status. 
- w, err := models.GetWebhookByID(t.HookID) - if err != nil { - log.Error("GetWebhookByID: %v", err) - return - } if t.IsSucceed { w.LastStatus = models.HookStatusSucceed } else { diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go index 0401464a448a..49e161ea57b5 100644 --- a/services/webhook/dingtalk.go +++ b/services/webhook/dingtalk.go @@ -25,9 +25,6 @@ var ( _ PayloadConvertor = &DingtalkPayload{} ) -// SetSecret sets the dingtalk secret -func (d *DingtalkPayload) SetSecret(_ string) {} - // JSONPayload Marshals the DingtalkPayload to json func (d *DingtalkPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -44,16 +41,7 @@ func (d *DingtalkPayload) Create(p *api.CreatePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: title, - Title: title, - HideAvatar: "0", - SingleTitle: fmt.Sprintf("view ref %s", refName), - SingleURL: p.Repo.HTMLURL + "/src/" + refName, - }, - }, nil + return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+refName), nil } // Delete implements PayloadConvertor Delete method @@ -62,32 +50,14 @@ func (d *DingtalkPayload) Delete(p *api.DeletePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: title, - Title: title, - HideAvatar: "0", - SingleTitle: fmt.Sprintf("view ref %s", refName), - SingleURL: p.Repo.HTMLURL + "/src/" + refName, - }, - }, nil + return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+refName), nil } // Fork implements PayloadConvertor Fork method func (d *DingtalkPayload) Fork(p *api.ForkPayload) (api.Payloader, error) { title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: title, - Title: title, - HideAvatar: "0", - SingleTitle: fmt.Sprintf("view forked repo %s", p.Repo.FullName), - SingleURL: p.Repo.HTMLURL, - }, - }, nil + return createDingtalkPayload(title, title, fmt.Sprintf("view forked repo %s", p.Repo.FullName), p.Repo.HTMLURL), nil } // Push implements PayloadConvertor Push method @@ -124,70 +94,32 @@ func (d *DingtalkPayload) Push(p *api.PushPayload) (api.Payloader, error) { strings.TrimRight(commit.Message, "\r\n")) + authorName // add linebreak to each commit but the last if i < len(p.Commits)-1 { - text += "\n" + text += "\r\n" } } - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: text, - Title: title, - HideAvatar: "0", - SingleTitle: linkText, - SingleURL: titleLink, - }, - }, nil + return createDingtalkPayload(title, text, linkText, titleLink), nil } // Issue implements PayloadConvertor Issue method func (d *DingtalkPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { text, issueTitle, attachmentText, _ := getIssuesPayloadInfo(p, noneLinkFormatter, true) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: text + "\r\n\r\n" + attachmentText, - //Markdown: "# " + title + "\n" + text, - Title: issueTitle, - HideAvatar: "0", - SingleTitle: "view issue", - SingleURL: p.Issue.HTMLURL, - }, - }, 
nil + return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+attachmentText, "view issue", p.Issue.HTMLURL), nil } // IssueComment implements PayloadConvertor IssueComment method func (d *DingtalkPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { text, issueTitle, _ := getIssueCommentPayloadInfo(p, noneLinkFormatter, true) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: text + "\r\n\r\n" + p.Comment.Body, - Title: issueTitle, - HideAvatar: "0", - SingleTitle: "view issue comment", - SingleURL: p.Comment.HTMLURL, - }, - }, nil + return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+p.Comment.Body, "view issue comment", p.Comment.HTMLURL), nil } // PullRequest implements PayloadConvertor PullRequest method func (d *DingtalkPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { text, issueTitle, attachmentText, _ := getPullRequestPayloadInfo(p, noneLinkFormatter, true) - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: text + "\r\n\r\n" + attachmentText, - //Markdown: "# " + title + "\n" + text, - Title: issueTitle, - HideAvatar: "0", - SingleTitle: "view pull request", - SingleURL: p.PullRequest.HTMLURL, - }, - }, nil + return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+attachmentText, "view pull request", p.PullRequest.HTMLURL), nil } // Review implements PayloadConvertor Review method @@ -205,37 +137,17 @@ func (d *DingtalkPayload) Review(p *api.PullRequestPayload, event models.HookEve } - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: title + "\r\n\r\n" + text, - Title: title, - HideAvatar: "0", - SingleTitle: "view pull request", - SingleURL: p.PullRequest.HTMLURL, - }, - }, nil + return createDingtalkPayload(title, title+"\r\n\r\n"+text, "view pull request", p.PullRequest.HTMLURL), nil } // Repository implements PayloadConvertor Repository method func (d *DingtalkPayload) Repository(p *api.RepositoryPayload) (api.Payloader, error) { - var title, url string switch p.Action { case api.HookRepoCreated: - title = fmt.Sprintf("[%s] Repository created", p.Repository.FullName) - url = p.Repository.HTMLURL - return &DingtalkPayload{ - MsgType: "actionCard", - ActionCard: dingtalk.ActionCard{ - Text: title, - Title: title, - HideAvatar: "0", - SingleTitle: "view repository", - SingleURL: url, - }, - }, nil + title := fmt.Sprintf("[%s] Repository created", p.Repository.FullName) + return createDingtalkPayload(title, title, "view repository", p.Repository.HTMLURL), nil case api.HookRepoDeleted: - title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName) + title := fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName) return &DingtalkPayload{ MsgType: "text", Text: struct { @@ -253,16 +165,20 @@ func (d *DingtalkPayload) Repository(p *api.RepositoryPayload) (api.Payloader, e func (d *DingtalkPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { text, _ := getReleasePayloadInfo(p, noneLinkFormatter, true) + return createDingtalkPayload(text, text, "view release", p.Release.URL), nil +} + +func createDingtalkPayload(title, text, singleTitle, singleURL string) *DingtalkPayload { return &DingtalkPayload{ MsgType: "actionCard", ActionCard: dingtalk.ActionCard{ - Text: text, - Title: text, + Text: strings.TrimSpace(text), + Title: strings.TrimSpace(title), HideAvatar: "0", - SingleTitle: "view release", - SingleURL: p.Release.URL, + SingleTitle: singleTitle, + SingleURL: singleURL, }, - 
}, nil + } } // GetDingtalkPayload converts a ding talk webhook into a DingtalkPayload diff --git a/services/webhook/dingtalk_test.go b/services/webhook/dingtalk_test.go index e5aa0fca36ab..213ad1a284ec 100644 --- a/services/webhook/dingtalk_test.go +++ b/services/webhook/dingtalk_test.go @@ -7,25 +7,202 @@ package webhook import ( "testing" + "code.gitea.io/gitea/models" api "code.gitea.io/gitea/modules/structs" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestGetDingTalkIssuesPayload(t *testing.T) { - p := issueTestPayload() - d := new(DingtalkPayload) - p.Action = api.HookIssueOpened - pl, err := d.Issue(p) +func TestDingTalkPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] branch test created", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo] branch test created", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view ref test", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] branch test deleted", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo] branch test deleted", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view ref test", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "test/repo2 is forked to test/repo", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "test/repo2 is forked to test/repo", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view forked repo test/repo", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo:test] 2 new commits", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view commit 2020558...2020558", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(DingtalkPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + 
assert.Equal(t, "[test/repo] Issue opened: #2 crash by user1\r\n\r\nissue body", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "#2 crash", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view issue", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*DingtalkPayload).ActionCard.SingleURL) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] Issue closed: #2 crash by user1", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "#2 crash", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view issue", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(DingtalkPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] New comment on issue #2 crash by user1\r\n\r\nmore info needed", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "#2 crash", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view issue comment", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(DingtalkPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug by user1\r\n\r\nfixes bug #2", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "#12 Fix bug", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view pull request", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(DingtalkPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug by user1\r\n\r\nchanges requested", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "#12 Fix bug", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view issue comment", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(DingtalkPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug\r\n\r\ngood job", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view pull request", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, 
"http://localhost:3000/test/repo/pulls/12", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] Repository created", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo] Repository created", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view repository", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(DingtalkPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DingtalkPayload{}, pl) + + assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.(*DingtalkPayload).ActionCard.Text) + assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.(*DingtalkPayload).ActionCard.Title) + assert.Equal(t, "view release", pl.(*DingtalkPayload).ActionCard.SingleTitle) + assert.Equal(t, "http://localhost:3000/api/v1/repos/test/repo/releases/2", pl.(*DingtalkPayload).ActionCard.SingleURL) + }) +} + +func TestDingTalkJSONPayload(t *testing.T) { + p := pushTestPayload() + + pl, err := new(DingtalkPayload).Push(p) require.NoError(t, err) require.NotNil(t, pl) - assert.Equal(t, "#2 crash", pl.(*DingtalkPayload).ActionCard.Title) - assert.Equal(t, "[test/repo] Issue opened: #2 crash by user1\r\n\r\n", pl.(*DingtalkPayload).ActionCard.Text) + require.IsType(t, &DingtalkPayload{}, pl) - p.Action = api.HookIssueClosed - pl, err = d.Issue(p) + json, err := pl.JSONPayload() require.NoError(t, err) - require.NotNil(t, pl) - assert.Equal(t, "#2 crash", pl.(*DingtalkPayload).ActionCard.Title) - assert.Equal(t, "[test/repo] Issue closed: #2 crash by user1\r\n\r\n", pl.(*DingtalkPayload).ActionCard.Text) + assert.NotEmpty(t, json) } diff --git a/services/webhook/discord.go b/services/webhook/discord.go index d28904715f61..ea3879f1980d 100644 --- a/services/webhook/discord.go +++ b/services/webhook/discord.go @@ -97,9 +97,6 @@ var ( redColor = color("ff3232") ) -// SetSecret sets the discord secret -func (d *DiscordPayload) SetSecret(_ string) {} - // JSONPayload Marshals the DiscordPayload to json func (d *DiscordPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -120,22 +117,7 @@ func (d *DiscordPayload) Create(p *api.CreatePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: title, - URL: p.Repo.HTMLURL + "/src/" + refName, - Color: greenColor, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL+"/src/"+refName, greenColor), nil } // Delete implements PayloadConvertor Delete method @@ -144,44 +126,14 @@ func (d *DiscordPayload) Delete(p *api.DeletePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: 
[]DiscordEmbed{ - { - Title: title, - URL: p.Repo.HTMLURL + "/src/" + refName, - Color: redColor, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL+"/src/"+refName, redColor), nil } // Fork implements PayloadConvertor Fork method func (d *DiscordPayload) Fork(p *api.ForkPayload) (api.Payloader, error) { title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: title, - URL: p.Repo.HTMLURL, - Color: greenColor, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL, greenColor), nil } // Push implements PayloadConvertor Push method @@ -216,92 +168,28 @@ func (d *DiscordPayload) Push(p *api.PushPayload) (api.Payloader, error) { } } - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: title, - Description: text, - URL: titleLink, - Color: greenColor, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, text, titleLink, greenColor), nil } // Issue implements PayloadConvertor Issue method func (d *DiscordPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { - text, _, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, false) + title, _, text, color := getIssuesPayloadInfo(p, noneLinkFormatter, false) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: text, - Description: attachmentText, - URL: p.Issue.HTMLURL, - Color: color, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, text, p.Issue.HTMLURL, color), nil } // IssueComment implements PayloadConvertor IssueComment method func (d *DiscordPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { - text, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false) + title, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: text, - Description: p.Comment.Body, - URL: p.Comment.HTMLURL, - Color: color, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, p.Comment.Body, p.Comment.HTMLURL, color), nil } // PullRequest implements PayloadConvertor PullRequest method func (d *DiscordPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { - text, _, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false) + title, _, text, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: text, - Description: attachmentText, - URL: p.PullRequest.HTMLURL, - Color: color, - Author: DiscordEmbedAuthor{ - Name: 
p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, text, p.PullRequest.HTMLURL, color), nil } // Review implements PayloadConvertor Review method @@ -330,23 +218,7 @@ func (d *DiscordPayload) Review(p *api.PullRequestPayload, event models.HookEven } } - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: title, - Description: text, - URL: p.PullRequest.HTMLURL, - Color: color, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, text, p.PullRequest.HTMLURL, color), nil } // Repository implements PayloadConvertor Repository method @@ -363,45 +235,14 @@ func (d *DiscordPayload) Repository(p *api.RepositoryPayload) (api.Payloader, er color = redColor } - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: title, - URL: url, - Color: color, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, title, "", url, color), nil } // Release implements PayloadConvertor Release method func (d *DiscordPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { text, color := getReleasePayloadInfo(p, noneLinkFormatter, false) - return &DiscordPayload{ - Username: d.Username, - AvatarURL: d.AvatarURL, - Embeds: []DiscordEmbed{ - { - Title: text, - Description: p.Release.Note, - URL: p.Release.URL, - Color: color, - Author: DiscordEmbedAuthor{ - Name: p.Sender.UserName, - URL: setting.AppURL + p.Sender.UserName, - IconURL: p.Sender.AvatarURL, - }, - }, - }, - }, nil + return d.createPayload(p.Sender, text, p.Release.Note, p.Release.URL, color), nil } // GetDiscordPayload converts a discord webhook into a DiscordPayload @@ -433,3 +274,23 @@ func parseHookPullRequestEventType(event models.HookEventType) (string, error) { return "", errors.New("unknown event type") } } + +func (d *DiscordPayload) createPayload(s *api.User, title, text, url string, color int) *DiscordPayload { + return &DiscordPayload{ + Username: d.Username, + AvatarURL: d.AvatarURL, + Embeds: []DiscordEmbed{ + { + Title: title, + Description: text, + URL: url, + Color: color, + Author: DiscordEmbedAuthor{ + Name: s.UserName, + URL: setting.AppURL + s.UserName, + IconURL: s.AvatarURL, + }, + }, + }, + } +} diff --git a/services/webhook/discord_test.go b/services/webhook/discord_test.go new file mode 100644 index 000000000000..fd7d2856c78b --- /dev/null +++ b/services/webhook/discord_test.go @@ -0,0 +1,245 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
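Aside: the Discord refactor mirrors the DingTalk one. The createPayload helper defined at the end of the hunk above carries the webhook's Username/AvatarURL and renders the sender as the embed author, so every converter shrinks to a single call. A rough usage sketch, not part of the patch (sampleDiscordEmbed is a hypothetical name; the values mirror the Create test below):

package webhook

import api "code.gitea.io/gitea/modules/structs"

// sampleDiscordEmbed builds a one-embed payload the same way the refactored
// converters do: a title, an optional description, a link target and a colour,
// with the sender shown as the embed author.
func sampleDiscordEmbed(d *DiscordPayload, sender *api.User) *DiscordPayload {
	return d.createPayload(sender, "[test/repo] branch test created", "", "http://localhost:3000/test/repo/src/test", greenColor)
}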
+ +package webhook + +import ( + "testing" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDiscordPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() + + d := new(DiscordPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] branch test created", pl.(*DiscordPayload).Embeds[0].Title) + assert.Empty(t, pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() + + d := new(DiscordPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] branch test deleted", pl.(*DiscordPayload).Embeds[0].Title) + assert.Empty(t, pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() + + d := new(DiscordPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "test/repo2 is forked to test/repo", pl.(*DiscordPayload).Embeds[0].Title) + assert.Empty(t, pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() + + d := new(DiscordPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo:test] 2 new commits", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, 
p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(DiscordPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "issue body", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.(*DiscordPayload).Embeds[0].Title) + assert.Empty(t, pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(DiscordPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "more info needed", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(DiscordPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "fixes bug #2", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(DiscordPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, 
pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "changes requested", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(DiscordPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "good job", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(DiscordPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Repository created", pl.(*DiscordPayload).Embeds[0].Title) + assert.Empty(t, pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(DiscordPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + assert.Len(t, pl.(*DiscordPayload).Embeds, 1) + assert.Equal(t, "[test/repo] Release created: v1.0", pl.(*DiscordPayload).Embeds[0].Title) + assert.Equal(t, "Note of first stable release", pl.(*DiscordPayload).Embeds[0].Description) + assert.Equal(t, "http://localhost:3000/api/v1/repos/test/repo/releases/2", pl.(*DiscordPayload).Embeds[0].URL) + assert.Equal(t, p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.Name) + assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.(*DiscordPayload).Embeds[0].Author.URL) + assert.Equal(t, p.Sender.AvatarURL, pl.(*DiscordPayload).Embeds[0].Author.IconURL) + }) +} + +func TestDiscordJSONPayload(t *testing.T) { + p := pushTestPayload() + + pl, err := new(DiscordPayload).Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &DiscordPayload{}, pl) + + json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) +} diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go index 847a991f366c..b280e67759ce 100644 --- 
a/services/webhook/feishu.go +++ b/services/webhook/feishu.go @@ -30,14 +30,11 @@ func newFeishuTextPayload(text string) *FeishuPayload { Content: struct { Text string `json:"text"` }{ - Text: text, + Text: strings.TrimSpace(text), }, } } -// SetSecret sets the Feishu secret -func (f *FeishuPayload) SetSecret(_ string) {} - // JSONPayload Marshals the FeishuPayload to json func (f *FeishuPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -84,7 +81,7 @@ func (f *FeishuPayload) Push(p *api.PushPayload) (api.Payloader, error) { commitDesc string ) - var text = fmt.Sprintf("[%s:%s] %s\n", p.Repo.FullName, branchName, commitDesc) + var text = fmt.Sprintf("[%s:%s] %s\r\n", p.Repo.FullName, branchName, commitDesc) // for each commit, generate attachment text for i, commit := range p.Commits { var authorName string @@ -95,7 +92,7 @@ func (f *FeishuPayload) Push(p *api.PushPayload) (api.Payloader, error) { strings.TrimRight(commit.Message, "\r\n")) + authorName // add linebreak to each commit but the last if i < len(p.Commits)-1 { - text += "\n" + text += "\r\n" } } @@ -125,19 +122,14 @@ func (f *FeishuPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, e // Review implements PayloadConvertor Review method func (f *FeishuPayload) Review(p *api.PullRequestPayload, event models.HookEventType) (api.Payloader, error) { - var text, title string - switch p.Action { - case api.HookIssueSynchronized: - action, err := parseHookPullRequestEventType(event) - if err != nil { - return nil, err - } - - title = fmt.Sprintf("[%s] Pull request review %s : #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title) - text = p.Review.Content - + action, err := parseHookPullRequestEventType(event) + if err != nil { + return nil, err } + title := fmt.Sprintf("[%s] Pull request review %s : #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title) + text := p.Review.Content + return newFeishuTextPayload(title + "\r\n\r\n" + text), nil } diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go new file mode 100644 index 000000000000..7f3508c145d5 --- /dev/null +++ b/services/webhook/feishu_test.go @@ -0,0 +1,172 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
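Aside: Feishu stays a plain text message. newFeishuTextPayload (hunk above) now trims leading and trailing whitespace, which is why the expected strings in the new tests below carry no trailing CRLF. A small sketch of producing the request body, not part of the patch (printSampleFeishuBody is a hypothetical name; the message mirrors the Create test):

package webhook

import "fmt"

// printSampleFeishuBody builds a trimmed text payload and marshals it into
// the JSON body that would be POSTed to Feishu.
func printSampleFeishuBody() error {
	pl := newFeishuTextPayload("[test/repo] branch test created\r\n")
	body, err := pl.JSONPayload()
	if err != nil {
		return err
	}
	fmt.Println(string(body))
	return nil
}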
+ +package webhook + +import ( + "testing" + + "code.gitea.io/gitea/models" + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFeishuPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() + + d := new(FeishuPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, `[test/repo] branch test created`, pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() + + d := new(FeishuPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, `[test/repo] branch test deleted`, pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() + + d := new(FeishuPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, `test/repo2 is forked to test/repo`, pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() + + d := new(FeishuPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "[test/repo:test] \r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(FeishuPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "#2 crash\r\n[test/repo] Issue opened: #2 crash by user1\r\n\r\nissue body", pl.(*FeishuPayload).Content.Text) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "#2 crash\r\n[test/repo] Issue closed: #2 crash by user1", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(FeishuPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "#2 crash\r\n[test/repo] New comment on issue #2 crash by user1\r\n\r\nmore info needed", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(FeishuPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "#12 Fix bug\r\n[test/repo] Pull request opened: #12 Fix bug by user1\r\n\r\nfixes bug #2", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(FeishuPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "#12 Fix bug\r\n[test/repo] New comment on pull request #12 Fix bug by user1\r\n\r\nchanges requested", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Review", func(t *testing.T) { + p := 
pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(FeishuPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug\r\n\r\ngood job", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(FeishuPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "[test/repo] Repository created", pl.(*FeishuPayload).Content.Text) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(FeishuPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.(*FeishuPayload).Content.Text) + }) +} + +func TestFeishuJSONPayload(t *testing.T) { + p := pushTestPayload() + + pl, err := new(FeishuPayload).Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &FeishuPayload{}, pl) + + json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) +} diff --git a/services/webhook/general.go b/services/webhook/general.go index ec247a24109b..777ae086b5e9 100644 --- a/services/webhook/general.go +++ b/services/webhook/general.go @@ -44,8 +44,11 @@ func getIssuesPayloadInfo(p *api.IssuePayload, linkFormatter linkFormatter, with case api.HookIssueEdited: text = fmt.Sprintf("[%s] Issue edited: %s", repoLink, titleLink) case api.HookIssueAssigned: - text = fmt.Sprintf("[%s] Issue assigned to %s: %s", repoLink, - linkFormatter(setting.AppURL+p.Issue.Assignee.UserName, p.Issue.Assignee.UserName), titleLink) + list := make([]string, len(p.Issue.Assignees)) + for i, user := range p.Issue.Assignees { + list[i] = linkFormatter(setting.AppURL+user.UserName, user.UserName) + } + text = fmt.Sprintf("[%s] Issue assigned to %s: %s", repoLink, strings.Join(list, ", "), titleLink) color = greenColor case api.HookIssueUnassigned: text = fmt.Sprintf("[%s] Issue unassigned: %s", repoLink, titleLink) @@ -102,7 +105,7 @@ func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkForm for i, user := range p.PullRequest.Assignees { list[i] = linkFormatter(setting.AppURL+user.UserName, user.UserName) } - text = fmt.Sprintf("[%s] Pull request assigned: %s to %s", repoLink, + text = fmt.Sprintf("[%s] Pull request assigned to %s: %s", repoLink, strings.Join(list, ", "), titleLink) color = greenColor case api.HookIssueUnassigned: @@ -115,7 +118,7 @@ func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkForm text = fmt.Sprintf("[%s] Pull request synchronized: %s", repoLink, titleLink) case api.HookIssueMilestoned: mileStoneLink := fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID) - text = fmt.Sprintf("[%s] Pull request milestoned: %s to %s", repoLink, + text = fmt.Sprintf("[%s] Pull request milestoned to %s: %s", repoLink, linkFormatter(mileStoneLink, p.PullRequest.Milestone.Title), titleLink) case api.HookIssueDemilestoned: text = fmt.Sprintf("[%s] Pull request milestone cleared: %s", repoLink, titleLink) diff --git a/services/webhook/general_test.go b/services/webhook/general_test.go index 3033b578805b..4d73afe060a6 100644 --- a/services/webhook/general_test.go +++ b/services/webhook/general_test.go 
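Aside: besides the wording fixes ("assigned to %s: %s", "milestoned to %s: %s"), the general.go hunk above makes issue assignment list every assignee instead of only the single Assignee field. Stripped of the webhook plumbing, the pattern is the following sketch (joinUserLinks and its plain function parameter are illustrative stand-ins for the package's linkFormatter, not part of the patch):

package webhook

import "strings"

// joinUserLinks formats one link per user and joins them with ", ", mirroring
// the loop getIssuesPayloadInfo now shares with getPullRequestPayloadInfo.
func joinUserLinks(userNames []string, format func(url, text string) string, appURL string) string {
	list := make([]string, len(userNames))
	for i, name := range userNames {
		list[i] = format(appURL+name, name)
	}
	return strings.Join(list, ", ")
}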
@@ -5,14 +5,111 @@ package webhook import ( + "testing" + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" ) +func createTestPayload() *api.CreatePayload { + return &api.CreatePayload{ + Sha: "2020558fe2e34debb818a514715839cabd25e777", + Ref: "refs/heads/test", + RefType: "branch", + Repo: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo", + Name: "repo", + FullName: "test/repo", + }, + Sender: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + } +} + +func deleteTestPayload() *api.DeletePayload { + return &api.DeletePayload{ + Ref: "refs/heads/test", + RefType: "branch", + Repo: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo", + Name: "repo", + FullName: "test/repo", + }, + Sender: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + } +} + +func forkTestPayload() *api.ForkPayload { + return &api.ForkPayload{ + Forkee: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo2", + Name: "repo2", + FullName: "test/repo2", + }, + Repo: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo", + Name: "repo", + FullName: "test/repo", + }, + Sender: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + } +} + +func pushTestPayload() *api.PushPayload { + commit := &api.PayloadCommit{ + ID: "2020558fe2e34debb818a514715839cabd25e778", + Message: "commit message", + URL: "http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778", + Author: &api.PayloadUser{ + Name: "user1", + Email: "user1@localhost", + UserName: "user1", + }, + Committer: &api.PayloadUser{ + Name: "user1", + Email: "user1@localhost", + UserName: "user1", + }, + } + + return &api.PushPayload{ + Ref: "refs/heads/test", + Before: "2020558fe2e34debb818a514715839cabd25e777", + After: "2020558fe2e34debb818a514715839cabd25e778", + CompareURL: "", + HeadCommit: commit, + Commits: []*api.PayloadCommit{commit, commit}, + Repo: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo", + Name: "repo", + FullName: "test/repo", + }, + Pusher: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + Sender: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + } +} + func issueTestPayload() *api.IssuePayload { return &api.IssuePayload{ Index: 2, Sender: &api.User{ - UserName: "user1", + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, Repository: &api.Repository{ HTMLURL: "http://localhost:3000/test/repo", @@ -20,10 +117,23 @@ func issueTestPayload() *api.IssuePayload { FullName: "test/repo", }, Issue: &api.Issue{ - ID: 2, - Index: 2, - URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", - Title: "crash", + ID: 2, + Index: 2, + URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", + HTMLURL: "http://localhost:3000/test/repo/issues/2", + Title: "crash", + Body: "issue body", + Assignees: []*api.User{ + { + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + }, + Milestone: &api.Milestone{ + ID: 1, + Title: "Milestone Title", + Description: "Milestone Description", + }, }, } } @@ -32,7 +142,8 @@ func issueCommentTestPayload() *api.IssueCommentPayload { return &api.IssueCommentPayload{ Action: api.HookIssueCommentCreated, Sender: &api.User{ - UserName: "user1", + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, Repository: &api.Repository{ HTMLURL: "http://localhost:3000/test/repo", @@ 
-45,11 +156,12 @@ func issueCommentTestPayload() *api.IssueCommentPayload { Body: "more info needed", }, Issue: &api.Issue{ - ID: 2, - Index: 2, - URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", - Title: "crash", - Body: "this happened", + ID: 2, + Index: 2, + URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", + HTMLURL: "http://localhost:3000/test/repo/issues/2", + Title: "crash", + Body: "this happened", }, } } @@ -58,7 +170,8 @@ func pullRequestCommentTestPayload() *api.IssueCommentPayload { return &api.IssueCommentPayload{ Action: api.HookIssueCommentCreated, Sender: &api.User{ - UserName: "user1", + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, Repository: &api.Repository{ HTMLURL: "http://localhost:3000/test/repo", @@ -66,16 +179,17 @@ func pullRequestCommentTestPayload() *api.IssueCommentPayload { FullName: "test/repo", }, Comment: &api.Comment{ - HTMLURL: "http://localhost:3000/test/repo/pulls/2#issuecomment-4", - PRURL: "http://localhost:3000/test/repo/pulls/2", + HTMLURL: "http://localhost:3000/test/repo/pulls/12#issuecomment-4", + PRURL: "http://localhost:3000/test/repo/pulls/12", Body: "changes requested", }, Issue: &api.Issue{ - ID: 2, - Index: 2, - URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", - Title: "Fix bug", - Body: "fixes bug #2", + ID: 12, + Index: 12, + URL: "http://localhost:3000/api/v1/repos/test/repo/pulls/12", + HTMLURL: "http://localhost:3000/test/repo/pulls/12", + Title: "Fix bug", + Body: "fixes bug #2", }, IsPull: true, } @@ -85,7 +199,8 @@ func pullReleaseTestPayload() *api.ReleasePayload { return &api.ReleasePayload{ Action: api.HookReleasePublished, Sender: &api.User{ - UserName: "user1", + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, Repository: &api.Repository{ HTMLURL: "http://localhost:3000/test/repo", @@ -96,6 +211,7 @@ func pullReleaseTestPayload() *api.ReleasePayload { TagName: "v1.0", Target: "master", Title: "First stable release", + Note: "Note of first stable release", URL: "http://localhost:3000/api/v1/repos/test/repo/releases/2", }, } @@ -104,9 +220,10 @@ func pullReleaseTestPayload() *api.ReleasePayload { func pullRequestTestPayload() *api.PullRequestPayload { return &api.PullRequestPayload{ Action: api.HookIssueOpened, - Index: 2, + Index: 12, Sender: &api.User{ - UserName: "user1", + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, Repository: &api.Repository{ HTMLURL: "http://localhost:3000/test/repo", @@ -114,12 +231,311 @@ func pullRequestTestPayload() *api.PullRequestPayload { FullName: "test/repo", }, PullRequest: &api.PullRequest{ - ID: 2, - Index: 2, + ID: 12, + Index: 12, URL: "http://localhost:3000/test/repo/pulls/12", + HTMLURL: "http://localhost:3000/test/repo/pulls/12", Title: "Fix bug", Body: "fixes bug #2", Mergeable: true, + Assignees: []*api.User{ + { + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + }, + Milestone: &api.Milestone{ + ID: 1, + Title: "Milestone Title", + Description: "Milestone Description", + }, + }, + Review: &api.ReviewPayload{ + Content: "good job", + }, + } +} + +func repositoryTestPayload() *api.RepositoryPayload { + return &api.RepositoryPayload{ + Action: api.HookRepoCreated, + Sender: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", }, + Repository: &api.Repository{ + HTMLURL: "http://localhost:3000/test/repo", + Name: "repo", + FullName: "test/repo", + }, + } +} + +func TestGetIssuesPayloadInfo(t *testing.T) { + p := 
issueTestPayload() + + cases := []struct { + action api.HookIssueAction + text string + issueTitle string + attachmentText string + color int + }{ + { + api.HookIssueOpened, + "[test/repo] Issue opened: #2 crash by user1", + "#2 crash", + "issue body", + orangeColor, + }, + { + api.HookIssueClosed, + "[test/repo] Issue closed: #2 crash by user1", + "#2 crash", + "", + redColor, + }, + { + api.HookIssueReOpened, + "[test/repo] Issue re-opened: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueEdited, + "[test/repo] Issue edited: #2 crash by user1", + "#2 crash", + "issue body", + yellowColor, + }, + { + api.HookIssueAssigned, + "[test/repo] Issue assigned to user1: #2 crash by user1", + "#2 crash", + "", + greenColor, + }, + { + api.HookIssueUnassigned, + "[test/repo] Issue unassigned: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueLabelUpdated, + "[test/repo] Issue labels updated: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueLabelCleared, + "[test/repo] Issue labels cleared: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueSynchronized, + "[test/repo] Issue synchronized: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueMilestoned, + "[test/repo] Issue milestoned to Milestone Title: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + { + api.HookIssueDemilestoned, + "[test/repo] Issue milestone cleared: #2 crash by user1", + "#2 crash", + "", + yellowColor, + }, + } + + for i, c := range cases { + p.Action = c.action + text, issueTitle, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, true) + assert.Equal(t, c.text, text, "case %d", i) + assert.Equal(t, c.issueTitle, issueTitle, "case %d", i) + assert.Equal(t, c.attachmentText, attachmentText, "case %d", i) + assert.Equal(t, c.color, color, "case %d", i) + } +} + +func TestGetPullRequestPayloadInfo(t *testing.T) { + p := pullRequestTestPayload() + + cases := []struct { + action api.HookIssueAction + text string + issueTitle string + attachmentText string + color int + }{ + { + api.HookIssueOpened, + "[test/repo] Pull request opened: #12 Fix bug by user1", + "#12 Fix bug", + "fixes bug #2", + greenColor, + }, + { + api.HookIssueClosed, + "[test/repo] Pull request closed: #12 Fix bug by user1", + "#12 Fix bug", + "", + redColor, + }, + { + api.HookIssueReOpened, + "[test/repo] Pull request re-opened: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + api.HookIssueEdited, + "[test/repo] Pull request edited: #12 Fix bug by user1", + "#12 Fix bug", + "fixes bug #2", + yellowColor, + }, + { + api.HookIssueAssigned, + "[test/repo] Pull request assigned to user1: #12 Fix bug by user1", + "#12 Fix bug", + "", + greenColor, + }, + { + api.HookIssueUnassigned, + "[test/repo] Pull request unassigned: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + api.HookIssueLabelUpdated, + "[test/repo] Pull request labels updated: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + api.HookIssueLabelCleared, + "[test/repo] Pull request labels cleared: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + api.HookIssueSynchronized, + "[test/repo] Pull request synchronized: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + api.HookIssueMilestoned, + "[test/repo] Pull request milestoned to Milestone Title: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + { + 
api.HookIssueDemilestoned, + "[test/repo] Pull request milestone cleared: #12 Fix bug by user1", + "#12 Fix bug", + "", + yellowColor, + }, + } + + for i, c := range cases { + p.Action = c.action + text, issueTitle, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, true) + assert.Equal(t, c.text, text, "case %d", i) + assert.Equal(t, c.issueTitle, issueTitle, "case %d", i) + assert.Equal(t, c.attachmentText, attachmentText, "case %d", i) + assert.Equal(t, c.color, color, "case %d", i) + } +} + +func TestGetReleasePayloadInfo(t *testing.T) { + p := pullReleaseTestPayload() + + cases := []struct { + action api.HookReleaseAction + text string + color int + }{ + { + api.HookReleasePublished, + "[test/repo] Release created: v1.0 by user1", + greenColor, + }, + { + api.HookReleaseUpdated, + "[test/repo] Release updated: v1.0 by user1", + yellowColor, + }, + { + api.HookReleaseDeleted, + "[test/repo] Release deleted: v1.0 by user1", + redColor, + }, + } + + for i, c := range cases { + p.Action = c.action + text, color := getReleasePayloadInfo(p, noneLinkFormatter, true) + assert.Equal(t, c.text, text, "case %d", i) + assert.Equal(t, c.color, color, "case %d", i) + } +} + +func TestGetIssueCommentPayloadInfo(t *testing.T) { + p := pullRequestCommentTestPayload() + + cases := []struct { + action api.HookIssueCommentAction + text string + issueTitle string + color int + }{ + { + api.HookIssueCommentCreated, + "[test/repo] New comment on pull request #12 Fix bug by user1", + "#12 Fix bug", + greenColorLight, + }, + { + api.HookIssueCommentEdited, + "[test/repo] Comment edited on pull request #12 Fix bug by user1", + "#12 Fix bug", + yellowColor, + }, + { + api.HookIssueCommentDeleted, + "[test/repo] Comment deleted on pull request #12 Fix bug by user1", + "#12 Fix bug", + redColor, + }, + } + + for i, c := range cases { + p.Action = c.action + text, issueTitle, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, true) + assert.Equal(t, c.text, text, "case %d", i) + assert.Equal(t, c.issueTitle, issueTitle, "case %d", i) + assert.Equal(t, c.color, color, "case %d", i) } } diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go index 1658dd4b44e9..6fca67ca84f9 100644 --- a/services/webhook/matrix.go +++ b/services/webhook/matrix.go @@ -76,9 +76,6 @@ type MatrixPayloadSafe struct { Commits []*api.PayloadCommit `json:"io.gitea.commits,omitempty"` } -// SetSecret sets the Matrix secret -func (m *MatrixPayloadUnsafe) SetSecret(_ string) {} - // JSONPayload Marshals the MatrixPayloadUnsafe to json func (m *MatrixPayloadUnsafe) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -263,7 +260,7 @@ func getMessageBody(htmlText string) string { // getMatrixHookRequest creates a new request which contains an Authorization header. 
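Aside: the matrix.go hunk that follows threads the *models.Webhook into getMatrixHookRequest, so the transaction ID is appended to w.URL and the stored hook task is no longer mutated; the updated TestMatrixHookRequest further down passes both values. A hypothetical call site, not part of the patch (buildMatrixRequest is an illustrative name only):

package webhook

import "code.gitea.io/gitea/models"

// buildMatrixRequest lets the webhook row supply URL and HTTP method while the
// task only supplies the payload content; getMatrixHookRequest modifies neither.
func buildMatrixRequest(w *models.Webhook, t *models.HookTask) error {
	req, err := getMatrixHookRequest(w, t)
	if err != nil {
		return err
	}
	_ = req // hand off to the deliverer's HTTP client in real code
	return nil
}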
// The access_token is removed from t.PayloadContent -func getMatrixHookRequest(t *models.HookTask) (*http.Request, error) { +func getMatrixHookRequest(w *models.Webhook, t *models.HookTask) (*http.Request, error) { payloadunsafe := MatrixPayloadUnsafe{} json := jsoniter.ConfigCompatibleWithStandardLibrary if err := json.Unmarshal([]byte(t.PayloadContent), &payloadunsafe); err != nil { @@ -288,9 +285,9 @@ func getMatrixHookRequest(t *models.HookTask) (*http.Request, error) { return nil, fmt.Errorf("getMatrixHookRequest: unable to hash payload: %+v", err) } - t.URL = fmt.Sprintf("%s/%s", t.URL, txnID) + url := fmt.Sprintf("%s/%s", w.URL, txnID) - req, err := http.NewRequest(t.HTTPMethod, t.URL, strings.NewReader(string(payload))) + req, err := http.NewRequest(w.HTTPMethod, url, strings.NewReader(string(payload))) if err != nil { return nil, err } diff --git a/services/webhook/matrix_test.go b/services/webhook/matrix_test.go index 771146f2f30f..451dff69495f 100644 --- a/services/webhook/matrix_test.go +++ b/services/webhook/matrix_test.go @@ -14,74 +14,178 @@ import ( "github.com/stretchr/testify/require" ) -func TestMatrixIssuesPayloadOpened(t *testing.T) { - p := issueTestPayload() - m := new(MatrixPayloadUnsafe) +func TestMatrixPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() - p.Action = api.HookIssueOpened - pl, err := m.Issue(p) - require.NoError(t, err) - require.NotNil(t, pl) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue opened: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] Issue opened: #2 crash by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) + d := new(MatrixPayloadUnsafe) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) - p.Action = api.HookIssueClosed - pl, err = m.Issue(p) - require.NoError(t, err) - require.NotNil(t, pl) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue closed: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] Issue closed: #2 crash by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) -} + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo):[test](http://localhost:3000/test/repo/src/branch/test)] branch created by user1", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo:test] branch created by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) -func TestMatrixIssueCommentPayload(t *testing.T) { - p := issueCommentTestPayload() - m := new(MatrixPayloadUnsafe) + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() - pl, err := m.IssueComment(p) - require.NoError(t, err) - require.NotNil(t, pl) + d := new(MatrixPayloadUnsafe) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on issue [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] New comment on issue #2 crash by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) -} + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo):test] branch deleted by user1", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo:test] branch deleted by 
user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) -func TestMatrixPullRequestCommentPayload(t *testing.T) { - p := pullRequestCommentTestPayload() - m := new(MatrixPayloadUnsafe) + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() - pl, err := m.IssueComment(p) - require.NoError(t, err) - require.NotNil(t, pl) + d := new(MatrixPayloadUnsafe) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on pull request [#2 Fix bug](http://localhost:3000/test/repo/pulls/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] New comment on pull request #2 Fix bug by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) -} + assert.Equal(t, "[test/repo2](http://localhost:3000/test/repo2) is forked to [test/repo](http://localhost:3000/test/repo)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `test/repo2 is forked to test/repo`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) -func TestMatrixReleasePayload(t *testing.T) { - p := pullReleaseTestPayload() - m := new(MatrixPayloadUnsafe) + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() - pl, err := m.Release(p) - require.NoError(t, err) - require.NotNil(t, pl) + d := new(MatrixPayloadUnsafe) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] user1 pushed 2 commits to [test](http://localhost:3000/test/repo/src/branch/test):\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] user1 pushed 2 commits to test:
    2020558: commit message - user1
    2020558: commit message - user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(MatrixPayloadUnsafe) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue opened: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Issue opened: #2 crash by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue closed: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Issue closed: #2 crash by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Release created: [v1.0](http://localhost:3000/test/repo/src/v1.0) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(MatrixPayloadUnsafe) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on issue [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] New comment on issue #2 crash by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(MatrixPayloadUnsafe) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Pull request opened: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Pull request opened: #12 Fix bug by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(MatrixPayloadUnsafe) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on pull request [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] New comment on pull request #12 Fix bug by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(MatrixPayloadUnsafe) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, 
"[[test/repo](http://localhost:3000/test/repo)] Pull request review approved: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Pull request review approved: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(MatrixPayloadUnsafe) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, `[[test/repo](http://localhost:3000/test/repo)] Repository created by [user1](https://try.gitea.io/user1)`, pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Repository created by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(MatrixPayloadUnsafe) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) + + assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Release created: [v1.0](http://localhost:3000/test/repo/src/v1.0) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) + assert.Equal(t, `[test/repo] Release created: v1.0 by user1`, pl.(*MatrixPayloadUnsafe).FormattedBody) + }) } -func TestMatrixPullRequestPayload(t *testing.T) { - p := pullRequestTestPayload() - m := new(MatrixPayloadUnsafe) +func TestMatrixJSONPayload(t *testing.T) { + p := pushTestPayload() - pl, err := m.PullRequest(p) + pl, err := new(MatrixPayloadUnsafe).Push(p) require.NoError(t, err) require.NotNil(t, pl) + require.IsType(t, &MatrixPayloadUnsafe{}, pl) - assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Pull request opened: [#2 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.(*MatrixPayloadUnsafe).Body) - assert.Equal(t, "[test/repo] Pull request opened: #2 Fix bug by user1", pl.(*MatrixPayloadUnsafe).FormattedBody) + json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) } func TestMatrixHookRequest(t *testing.T) { + w := &models.Webhook{} + h := &models.HookTask{ PayloadContent: `{ "body": "[[user1/test](http://localhost:3000/user1/test)] user1 pushed 1 commit to [master](http://localhost:3000/user1/test/src/branch/master):\n[5175ef2](http://localhost:3000/user1/test/commit/5175ef26201c58b035a3404b3fe02b4e8d436eee): Merge pull request 'Change readme.md' (#2) from add-matrix-webhook into master\n\nReviewed-on: http://localhost:3000/user1/test/pulls/2\n - user1", @@ -143,7 +247,7 @@ func TestMatrixHookRequest(t *testing.T) { ] }` - req, err := getMatrixHookRequest(h) + req, err := getMatrixHookRequest(w, h) require.NoError(t, err) require.NotNil(t, req) diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go index dc83a47c8d80..035dbc1c4cf2 100644 --- a/services/webhook/msteams.go +++ b/services/webhook/msteams.go @@ -55,9 +55,6 @@ type ( } ) -// SetSecret sets the MSTeams secret -func (m *MSTeamsPayload) SetSecret(_ string) {} - // JSONPayload Marshals the MSTeamsPayload to json func (m *MSTeamsPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -78,42 +75,15 @@ func (m *MSTeamsPayload) Create(p *api.CreatePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, 
p.RefType, refName) - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", greenColor), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repo.FullName, - }, - { - Name: fmt.Sprintf("%s:", p.RefType), - Value: refName, - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.Repo.HTMLURL + "/src/" + refName, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + "", + p.Repo.HTMLURL+"/src/"+refName, + greenColor, + &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + ), nil } // Delete implements PayloadConvertor Delete method @@ -122,84 +92,30 @@ func (m *MSTeamsPayload) Delete(p *api.DeletePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName) - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", yellowColor), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repo.FullName, - }, - { - Name: fmt.Sprintf("%s:", p.RefType), - Value: refName, - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.Repo.HTMLURL + "/src/" + refName, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + "", + p.Repo.HTMLURL+"/src/"+refName, + yellowColor, + &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + ), nil } // Fork implements PayloadConvertor Fork method func (m *MSTeamsPayload) Fork(p *api.ForkPayload) (api.Payloader, error) { title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName) - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", greenColor), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Facts: []MSTeamsFact{ - { - Name: "Forkee:", - Value: p.Forkee.FullName, - }, - { - Name: "Repository:", - Value: p.Repo.FullName, - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.Repo.HTMLURL, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + "", + p.Repo.HTMLURL, + greenColor, + &MSTeamsFact{"Forkee:", p.Forkee.FullName}, + ), nil } // Push implements PayloadConvertor Push method @@ -234,172 +150,60 @@ func (m *MSTeamsPayload) Push(p *api.PushPayload) (api.Payloader, error) { } } - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", greenColor), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: text, - Facts: 
[]MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repo.FullName, - }, - { - Name: "Commit count:", - Value: fmt.Sprintf("%d", len(p.Commits)), - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: titleLink, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + text, + titleLink, + greenColor, + &MSTeamsFact{"Commit count:", fmt.Sprintf("%d", len(p.Commits))}, + ), nil } // Issue implements PayloadConvertor Issue method func (m *MSTeamsPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { - text, _, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, false) - - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", color), - Title: text, - Summary: text, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: attachmentText, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repository.FullName, - }, - { - Name: "Issue #:", - Value: fmt.Sprintf("%d", p.Issue.ID), - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.Issue.HTMLURL, - }, - }, - }, - }, - }, nil + title, _, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + attachmentText, + p.Issue.HTMLURL, + color, + &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + ), nil } // IssueComment implements PayloadConvertor IssueComment method func (m *MSTeamsPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { - text, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false) - - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", color), - Title: text, - Summary: text, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: p.Comment.Body, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repository.FullName, - }, - { - Name: "Issue #:", - Value: fmt.Sprintf("%d", p.Issue.ID), - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.Comment.HTMLURL, - }, - }, - }, - }, - }, nil + title, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + p.Comment.Body, + p.Comment.HTMLURL, + color, + &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + ), nil } // PullRequest implements PayloadConvertor PullRequest method func (m *MSTeamsPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { - text, _, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false) - - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", color), - Title: text, - Summary: text, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: attachmentText, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: 
p.Repository.FullName, - }, - { - Name: "Pull request #:", - Value: fmt.Sprintf("%d", p.PullRequest.ID), - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.PullRequest.HTMLURL, - }, - }, - }, - }, - }, nil + title, _, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + attachmentText, + p.PullRequest.HTMLURL, + color, + &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + ), nil } // Review implements PayloadConvertor Review method @@ -428,43 +232,15 @@ func (m *MSTeamsPayload) Review(p *api.PullRequestPayload, event models.HookEven } } - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", color), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: text, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repository.FullName, - }, - { - Name: "Pull request #:", - Value: fmt.Sprintf("%d", p.PullRequest.ID), - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: p.PullRequest.HTMLURL, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + text, + p.PullRequest.HTMLURL, + color, + &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + ), nil } // Repository implements PayloadConvertor Repository method @@ -481,66 +257,61 @@ func (m *MSTeamsPayload) Repository(p *api.RepositoryPayload) (api.Payloader, er color = yellowColor } - return &MSTeamsPayload{ - Type: "MessageCard", - Context: "https://schema.org/extensions", - ThemeColor: fmt.Sprintf("%x", color), - Title: title, - Summary: title, - Sections: []MSTeamsSection{ - { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repository.FullName, - }, - }, - }, - }, - PotentialAction: []MSTeamsAction{ - { - Type: "OpenUri", - Name: "View in Gitea", - Targets: []MSTeamsActionTarget{ - { - Os: "default", - URI: url, - }, - }, - }, - }, - }, nil + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + "", + url, + color, + nil, + ), nil } // Release implements PayloadConvertor Release method func (m *MSTeamsPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { - text, color := getReleasePayloadInfo(p, noneLinkFormatter, false) + title, color := getReleasePayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repository, + p.Sender, + title, + "", + p.Release.URL, + color, + &MSTeamsFact{"Tag:", p.Release.TagName}, + ), nil +} + +// GetMSTeamsPayload converts a MSTeams webhook into a MSTeamsPayload +func GetMSTeamsPayload(p api.Payloader, event models.HookEventType, meta string) (api.Payloader, error) { + return convertPayloader(new(MSTeamsPayload), p, event) +} + +func createMSTeamsPayload(r *api.Repository, s *api.User, title, text, actionTarget string, color int, fact *MSTeamsFact) *MSTeamsPayload { + facts := []MSTeamsFact{ + { + Name: "Repository:", + Value: r.FullName, + }, + } + if fact != nil { + facts = append(facts, *fact) + } return &MSTeamsPayload{ Type: 
"MessageCard", Context: "https://schema.org/extensions", ThemeColor: fmt.Sprintf("%x", color), - Title: text, - Summary: text, + Title: title, + Summary: title, Sections: []MSTeamsSection{ { - ActivityTitle: p.Sender.FullName, - ActivitySubtitle: p.Sender.UserName, - ActivityImage: p.Sender.AvatarURL, - Text: p.Release.Note, - Facts: []MSTeamsFact{ - { - Name: "Repository:", - Value: p.Repository.FullName, - }, - { - Name: "Tag:", - Value: p.Release.TagName, - }, - }, + ActivityTitle: s.FullName, + ActivitySubtitle: s.UserName, + ActivityImage: s.AvatarURL, + Text: text, + Facts: facts, }, }, PotentialAction: []MSTeamsAction{ @@ -550,15 +321,10 @@ func (m *MSTeamsPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { Targets: []MSTeamsActionTarget{ { Os: "default", - URI: p.Release.URL, + URI: actionTarget, }, }, }, }, - }, nil -} - -// GetMSTeamsPayload converts a MSTeams webhook into a MSTeamsPayload -func GetMSTeamsPayload(p api.Payloader, event models.HookEventType, meta string) (api.Payloader, error) { - return convertPayloader(new(MSTeamsPayload), p, event) + } } diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go new file mode 100644 index 000000000000..2f54c39d396b --- /dev/null +++ b/services/webhook/msteams_test.go @@ -0,0 +1,374 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package webhook + +import ( + "testing" + + "code.gitea.io/gitea/models" + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestMSTeamsPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] branch test created", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] branch test created", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repo.FullName, fact.Value) + } else if fact.Name == "branch:" { + assert.Equal(t, "test", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] branch test deleted", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] branch test deleted", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == 
"Repository:" { + assert.Equal(t, p.Repo.FullName, fact.Value) + } else if fact.Name == "branch:" { + assert.Equal(t, "test", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "test/repo2 is forked to test/repo", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "test/repo2 is forked to test/repo", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repo.FullName, fact.Value) + } else if fact.Name == "Forkee:" { + assert.Equal(t, p.Forkee.FullName, fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo:test] 2 new commits", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo:test] 2 new commits", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\n\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repo.FullName, fact.Value) + } else if fact.Name == "Commit count:" { + assert.Equal(t, "2", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(MSTeamsPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "issue body", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, 
pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Issue #:" { + assert.Equal(t, "2", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Issue #:" { + assert.Equal(t, "2", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "more info needed", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Issue #:" { + assert.Equal(t, "2", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "fixes bug #2", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { 
+ if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Pull request #:" { + assert.Equal(t, "12", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "changes requested", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Issue #:" { + assert.Equal(t, "12", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(MSTeamsPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Equal(t, "good job", pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Pull request #:" { + assert.Equal(t, "12", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Repository created", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Repository created", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, 
pl.(*MSTeamsPayload).Sections[0].Facts, 1) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/test/repo", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(MSTeamsPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + assert.Equal(t, "[test/repo] Release created: v1.0", pl.(*MSTeamsPayload).Title) + assert.Equal(t, "[test/repo] Release created: v1.0", pl.(*MSTeamsPayload).Summary) + assert.Len(t, pl.(*MSTeamsPayload).Sections, 1) + assert.Equal(t, "user1", pl.(*MSTeamsPayload).Sections[0].ActivitySubtitle) + assert.Empty(t, pl.(*MSTeamsPayload).Sections[0].Text) + assert.Len(t, pl.(*MSTeamsPayload).Sections[0].Facts, 2) + for _, fact := range pl.(*MSTeamsPayload).Sections[0].Facts { + if fact.Name == "Repository:" { + assert.Equal(t, p.Repository.FullName, fact.Value) + } else if fact.Name == "Tag:" { + assert.Equal(t, "v1.0", fact.Value) + } else { + t.Fail() + } + } + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction, 1) + assert.Len(t, pl.(*MSTeamsPayload).PotentialAction[0].Targets, 1) + assert.Equal(t, "http://localhost:3000/api/v1/repos/test/repo/releases/2", pl.(*MSTeamsPayload).PotentialAction[0].Targets[0].URI) + }) +} + +func TestMSTeamsJSONPayload(t *testing.T) { + p := pushTestPayload() + + pl, err := new(MSTeamsPayload).Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &MSTeamsPayload{}, pl) + + json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) +} diff --git a/services/webhook/slack.go b/services/webhook/slack.go index f5c857f2a961..f522ca35f2f0 100644 --- a/services/webhook/slack.go +++ b/services/webhook/slack.go @@ -56,9 +56,6 @@ type SlackAttachment struct { Text string `json:"text"` } -// SetSecret sets the slack secret -func (s *SlackPayload) SetSecret(_ string) {} - // JSONPayload Marshals the SlackPayload to json func (s *SlackPayload) JSONPayload() ([]byte, error) { json := jsoniter.ConfigCompatibleWithStandardLibrary @@ -111,12 +108,7 @@ func (s *SlackPayload) Create(p *api.CreatePayload) (api.Payloader, error) { refLink := SlackLinkToRef(p.Repo.HTMLURL, p.Ref) text := fmt.Sprintf("[%s:%s] %s created by %s", repoLink, refLink, p.RefType, p.Sender.UserName) - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + return s.createPayload(text, nil), nil } // Delete composes Slack payload for delete a branch or tag. @@ -124,12 +116,8 @@ func (s *SlackPayload) Delete(p *api.DeletePayload) (api.Payloader, error) { refName := git.RefEndName(p.Ref) repoLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName) text := fmt.Sprintf("[%s:%s] %s deleted by %s", repoLink, refName, p.RefType, p.Sender.UserName) - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + + return s.createPayload(text, nil), nil } // Fork composes Slack payload for forked by a repository. 
@@ -137,66 +125,46 @@ func (s *SlackPayload) Fork(p *api.ForkPayload) (api.Payloader, error) { baseLink := SlackLinkFormatter(p.Forkee.HTMLURL, p.Forkee.FullName) forkLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName) text := fmt.Sprintf("%s is forked to %s", baseLink, forkLink) - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + + return s.createPayload(text, nil), nil } // Issue implements PayloadConvertor Issue method func (s *SlackPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { text, issueTitle, attachmentText, color := getIssuesPayloadInfo(p, SlackLinkFormatter, true) - pl := &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - } + var attachments []SlackAttachment if attachmentText != "" { attachmentText = SlackTextFormatter(attachmentText) issueTitle = SlackTextFormatter(issueTitle) - pl.Attachments = []SlackAttachment{{ + attachments = append(attachments, SlackAttachment{ Color: fmt.Sprintf("%x", color), Title: issueTitle, TitleLink: p.Issue.HTMLURL, Text: attachmentText, - }} + }) } - return pl, nil + return s.createPayload(text, attachments), nil } // IssueComment implements PayloadConvertor IssueComment method func (s *SlackPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { text, issueTitle, color := getIssueCommentPayloadInfo(p, SlackLinkFormatter, true) - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - Attachments: []SlackAttachment{{ - Color: fmt.Sprintf("%x", color), - Title: issueTitle, - TitleLink: p.Comment.HTMLURL, - Text: SlackTextFormatter(p.Comment.Body), - }}, - }, nil + return s.createPayload(text, []SlackAttachment{{ + Color: fmt.Sprintf("%x", color), + Title: issueTitle, + TitleLink: p.Comment.HTMLURL, + Text: SlackTextFormatter(p.Comment.Body), + }}), nil } // Release implements PayloadConvertor Release method func (s *SlackPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { text, _ := getReleasePayloadInfo(p, SlackLinkFormatter, true) - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + return s.createPayload(text, nil), nil } // Push implements PayloadConvertor Push method @@ -232,42 +200,31 @@ func (s *SlackPayload) Push(p *api.PushPayload) (api.Payloader, error) { } } - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - Attachments: []SlackAttachment{{ - Color: s.Color, - Title: p.Repo.HTMLURL, - TitleLink: p.Repo.HTMLURL, - Text: attachmentText, - }}, - }, nil + return s.createPayload(text, []SlackAttachment{{ + Color: s.Color, + Title: p.Repo.HTMLURL, + TitleLink: p.Repo.HTMLURL, + Text: attachmentText, + }}), nil } // PullRequest implements PayloadConvertor PullRequest method func (s *SlackPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { text, issueTitle, attachmentText, color := getPullRequestPayloadInfo(p, SlackLinkFormatter, true) - pl := &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - } + var attachments []SlackAttachment if attachmentText != "" { attachmentText = SlackTextFormatter(p.PullRequest.Body) issueTitle = SlackTextFormatter(issueTitle) - pl.Attachments = []SlackAttachment{{ + attachments = append(attachments, SlackAttachment{ Color: fmt.Sprintf("%x", color), Title: issueTitle, TitleLink: p.PullRequest.URL, Text: attachmentText, 
- }} + }) } - return pl, nil + return s.createPayload(text, attachments), nil } // Review implements PayloadConvertor Review method @@ -288,12 +245,7 @@ func (s *SlackPayload) Review(p *api.PullRequestPayload, event models.HookEventT text = fmt.Sprintf("[%s] Pull request review %s: [%s](%s) by %s", repoLink, action, title, titleLink, senderLink) } - return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + return s.createPayload(text, nil), nil } // Repository implements PayloadConvertor Repository method @@ -309,12 +261,17 @@ func (s *SlackPayload) Repository(p *api.RepositoryPayload) (api.Payloader, erro text = fmt.Sprintf("[%s] Repository deleted by %s", repoLink, senderLink) } + return s.createPayload(text, nil), nil +} + +func (s *SlackPayload) createPayload(text string, attachments []SlackAttachment) *SlackPayload { return &SlackPayload{ - Channel: s.Channel, - Text: text, - Username: s.Username, - IconURL: s.IconURL, - }, nil + Channel: s.Channel, + Text: text, + Username: s.Username, + IconURL: s.IconURL, + Attachments: attachments, + } } // GetSlackPayload converts a slack webhook into a SlackPayload diff --git a/services/webhook/slack_test.go b/services/webhook/slack_test.go index 20de80bd656d..3f279810c98a 100644 --- a/services/webhook/slack_test.go +++ b/services/webhook/slack_test.go @@ -7,74 +7,166 @@ package webhook import ( "testing" + "code.gitea.io/gitea/models" api "code.gitea.io/gitea/modules/structs" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestSlackIssuesPayloadOpened(t *testing.T) { - p := issueTestPayload() - p.Action = api.HookIssueOpened +func TestSlackPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() - s := new(SlackPayload) - s.Username = p.Sender.UserName + d := new(SlackPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) - pl, err := s.Issue(p) - require.NoError(t, err) - require.NotNil(t, pl) - assert.Equal(t, "[] Issue opened: by ", pl.(*SlackPayload).Text) + assert.Equal(t, "[:] branch created by user1", pl.(*SlackPayload).Text) + }) - p.Action = api.HookIssueClosed - pl, err = s.Issue(p) - require.NoError(t, err) - require.NotNil(t, pl) - assert.Equal(t, "[] Issue closed: by ", pl.(*SlackPayload).Text) -} + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() -func TestSlackIssueCommentPayload(t *testing.T) { - p := issueCommentTestPayload() - s := new(SlackPayload) - s.Username = p.Sender.UserName + d := new(SlackPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) - pl, err := s.IssueComment(p) - require.NoError(t, err) - require.NotNil(t, pl) + assert.Equal(t, "[:test] branch deleted by user1", pl.(*SlackPayload).Text) + }) - assert.Equal(t, "[] New comment on issue by ", pl.(*SlackPayload).Text) -} + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() -func TestSlackPullRequestCommentPayload(t *testing.T) { - p := pullRequestCommentTestPayload() - s := new(SlackPayload) - s.Username = p.Sender.UserName + d := new(SlackPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) - pl, err := s.IssueComment(p) - require.NoError(t, err) - require.NotNil(t, pl) + assert.Equal(t, " is forked to ", pl.(*SlackPayload).Text) + }) - assert.Equal(t, "[] New comment on pull request by ", 
pl.(*SlackPayload).Text) -} + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() -func TestSlackReleasePayload(t *testing.T) { - p := pullReleaseTestPayload() - s := new(SlackPayload) - s.Username = p.Sender.UserName + d := new(SlackPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) - pl, err := s.Release(p) - require.NoError(t, err) - require.NotNil(t, pl) + assert.Equal(t, "[:] 2 new commits pushed by user1", pl.(*SlackPayload).Text) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(SlackPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Issue opened: by ", pl.(*SlackPayload).Text) - assert.Equal(t, "[] Release created: by ", pl.(*SlackPayload).Text) + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Issue closed: by ", pl.(*SlackPayload).Text) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(SlackPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] New comment on issue by ", pl.(*SlackPayload).Text) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(SlackPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Pull request opened: by ", pl.(*SlackPayload).Text) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(SlackPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] New comment on pull request by ", pl.(*SlackPayload).Text) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(SlackPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Pull request review approved: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by ", pl.(*SlackPayload).Text) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(SlackPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Repository created by ", pl.(*SlackPayload).Text) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(SlackPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) + + assert.Equal(t, "[] Release created: by ", pl.(*SlackPayload).Text) + }) } -func TestSlackPullRequestPayload(t *testing.T) { - p := pullRequestTestPayload() - s := new(SlackPayload) - s.Username = p.Sender.UserName +func TestSlackJSONPayload(t *testing.T) { + p := pushTestPayload() - pl, err := s.PullRequest(p) + pl, err := new(SlackPayload).Push(p) require.NoError(t, err) require.NotNil(t, pl) + require.IsType(t, &SlackPayload{}, pl) - assert.Equal(t, "[] Pull request opened: by ", pl.(*SlackPayload).Text) + 
json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) } diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go index 5b78b46f8ec0..4c4230759d31 100644 --- a/services/webhook/telegram.go +++ b/services/webhook/telegram.go @@ -45,9 +45,6 @@ var ( _ PayloadConvertor = &TelegramPayload{} ) -// SetSecret sets the telegram secret -func (t *TelegramPayload) SetSecret(_ string) {} - // JSONPayload Marshals the TelegramPayload to json func (t *TelegramPayload) JSONPayload() ([]byte, error) { t.ParseMode = "HTML" @@ -68,9 +65,7 @@ func (t *TelegramPayload) Create(p *api.CreatePayload) (api.Payloader, error) { title := fmt.Sprintf(`[%s] %s %s created`, p.Repo.HTMLURL, p.Repo.FullName, p.RefType, p.Repo.HTMLURL+"/src/"+refName, refName) - return &TelegramPayload{ - Message: title, - }, nil + return createTelegramPayload(title), nil } // Delete implements PayloadConvertor Delete method @@ -80,18 +75,14 @@ func (t *TelegramPayload) Delete(p *api.DeletePayload) (api.Payloader, error) { title := fmt.Sprintf(`[%s] %s %s deleted`, p.Repo.HTMLURL, p.Repo.FullName, p.RefType, p.Repo.HTMLURL+"/src/"+refName, refName) - return &TelegramPayload{ - Message: title, - }, nil + return createTelegramPayload(title), nil } // Fork implements PayloadConvertor Fork method func (t *TelegramPayload) Fork(p *api.ForkPayload) (api.Payloader, error) { title := fmt.Sprintf(`%s is forked to %s`, p.Forkee.FullName, p.Repo.HTMLURL, p.Repo.FullName) - return &TelegramPayload{ - Message: title, - }, nil + return createTelegramPayload(title), nil } // Push implements PayloadConvertor Push method @@ -129,36 +120,28 @@ func (t *TelegramPayload) Push(p *api.PushPayload) (api.Payloader, error) { } } - return &TelegramPayload{ - Message: title + "\n" + text, - }, nil + return createTelegramPayload(title + "\n" + text), nil } // Issue implements PayloadConvertor Issue method func (t *TelegramPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { text, _, attachmentText, _ := getIssuesPayloadInfo(p, htmlLinkFormatter, true) - return &TelegramPayload{ - Message: text + "\n\n" + attachmentText, - }, nil + return createTelegramPayload(text + "\n\n" + attachmentText), nil } // IssueComment implements PayloadConvertor IssueComment method func (t *TelegramPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { text, _, _ := getIssueCommentPayloadInfo(p, htmlLinkFormatter, true) - return &TelegramPayload{ - Message: text + "\n" + p.Comment.Body, - }, nil + return createTelegramPayload(text + "\n" + p.Comment.Body), nil } // PullRequest implements PayloadConvertor PullRequest method func (t *TelegramPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { text, _, attachmentText, _ := getPullRequestPayloadInfo(p, htmlLinkFormatter, true) - return &TelegramPayload{ - Message: text + "\n" + attachmentText, - }, nil + return createTelegramPayload(text + "\n" + attachmentText), nil } // Review implements PayloadConvertor Review method @@ -173,12 +156,9 @@ func (t *TelegramPayload) Review(p *api.PullRequestPayload, event models.HookEve text = fmt.Sprintf("[%s] Pull request review %s: #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title) attachmentText = p.Review.Content - } - return &TelegramPayload{ - Message: text + "\n" + attachmentText, - }, nil + return createTelegramPayload(text + "\n" + attachmentText), nil } // Repository implements PayloadConvertor Repository method @@ -187,14 +167,10 @@ func (t *TelegramPayload) Repository(p 
*api.RepositoryPayload) (api.Payloader, e switch p.Action { case api.HookRepoCreated: title = fmt.Sprintf(`[%s] Repository created`, p.Repository.HTMLURL, p.Repository.FullName) - return &TelegramPayload{ - Message: title, - }, nil + return createTelegramPayload(title), nil case api.HookRepoDeleted: title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName) - return &TelegramPayload{ - Message: title, - }, nil + return createTelegramPayload(title), nil } return nil, nil } @@ -203,12 +179,16 @@ func (t *TelegramPayload) Repository(p *api.RepositoryPayload) (api.Payloader, e func (t *TelegramPayload) Release(p *api.ReleasePayload) (api.Payloader, error) { text, _ := getReleasePayloadInfo(p, htmlLinkFormatter, true) - return &TelegramPayload{ - Message: text + "\n", - }, nil + return createTelegramPayload(text), nil } // GetTelegramPayload converts a telegram webhook into a TelegramPayload func GetTelegramPayload(p api.Payloader, event models.HookEventType, meta string) (api.Payloader, error) { return convertPayloader(new(TelegramPayload), p, event) } + +func createTelegramPayload(message string) *TelegramPayload { + return &TelegramPayload{ + Message: strings.TrimSpace(message), + } +} diff --git a/services/webhook/telegram_test.go b/services/webhook/telegram_test.go index 0e909343a86c..037a2481d6df 100644 --- a/services/webhook/telegram_test.go +++ b/services/webhook/telegram_test.go @@ -7,18 +7,166 @@ package webhook import ( "testing" + "code.gitea.io/gitea/models" api "code.gitea.io/gitea/modules/structs" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestGetTelegramIssuesPayload(t *testing.T) { - p := issueTestPayload() - p.Action = api.HookIssueClosed +func TestTelegramPayload(t *testing.T) { + t.Run("Create", func(t *testing.T) { + p := createTestPayload() + + d := new(TelegramPayload) + pl, err := d.Create(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, `[test/repo] branch test created`, pl.(*TelegramPayload).Message) + }) + + t.Run("Delete", func(t *testing.T) { + p := deleteTestPayload() + + d := new(TelegramPayload) + pl, err := d.Delete(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, `[test/repo] branch test deleted`, pl.(*TelegramPayload).Message) + }) + + t.Run("Fork", func(t *testing.T) { + p := forkTestPayload() + + d := new(TelegramPayload) + pl, err := d.Fork(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, `test/repo2 is forked to test/repo`, pl.(*TelegramPayload).Message) + }) + + t.Run("Push", func(t *testing.T) { + p := pushTestPayload() + + d := new(TelegramPayload) + pl, err := d.Push(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, "[test/repo:test] 2 new commits\n[2020558] commit message - user1\n[2020558] commit message - user1", pl.(*TelegramPayload).Message) + }) + + t.Run("Issue", func(t *testing.T) { + p := issueTestPayload() + + d := new(TelegramPayload) + p.Action = api.HookIssueOpened + pl, err := d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, "[test/repo] Issue opened: #2 crash by user1\n\nissue body", pl.(*TelegramPayload).Message) + + p.Action = api.HookIssueClosed + pl, err = d.Issue(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, 
&TelegramPayload{}, pl) + + assert.Equal(t, `[test/repo] Issue closed: #2 crash by user1`, pl.(*TelegramPayload).Message) + }) + + t.Run("IssueComment", func(t *testing.T) { + p := issueCommentTestPayload() + + d := new(TelegramPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) - pl, err := new(TelegramPayload).Issue(p) + assert.Equal(t, "[test/repo] New comment on issue #2 crash by user1\nmore info needed", pl.(*TelegramPayload).Message) + }) + + t.Run("PullRequest", func(t *testing.T) { + p := pullRequestTestPayload() + + d := new(TelegramPayload) + pl, err := d.PullRequest(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug by user1\nfixes bug #2", pl.(*TelegramPayload).Message) + }) + + t.Run("PullRequestComment", func(t *testing.T) { + p := pullRequestCommentTestPayload() + + d := new(TelegramPayload) + pl, err := d.IssueComment(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug by user1\nchanges requested", pl.(*TelegramPayload).Message) + }) + + t.Run("Review", func(t *testing.T) { + p := pullRequestTestPayload() + p.Action = api.HookIssueReviewed + + d := new(TelegramPayload) + pl, err := d.Review(p, models.HookEventPullRequestReviewApproved) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug\ngood job", pl.(*TelegramPayload).Message) + }) + + t.Run("Repository", func(t *testing.T) { + p := repositoryTestPayload() + + d := new(TelegramPayload) + pl, err := d.Repository(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, `[test/repo] Repository created`, pl.(*TelegramPayload).Message) + }) + + t.Run("Release", func(t *testing.T) { + p := pullReleaseTestPayload() + + d := new(TelegramPayload) + pl, err := d.Release(p) + require.NoError(t, err) + require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) + + assert.Equal(t, `[test/repo] Release created: v1.0 by user1`, pl.(*TelegramPayload).Message) + }) +} + +func TestTelegramJSONPayload(t *testing.T) { + p := pushTestPayload() + + pl, err := new(TelegramPayload).Push(p) require.NoError(t, err) require.NotNil(t, pl) + require.IsType(t, &TelegramPayload{}, pl) - assert.Equal(t, "[test/repo] Issue closed: #2 crash by user1\n\n", pl.(*TelegramPayload).Message) + json, err := pl.JSONPayload() + require.NoError(t, err) + assert.NotEmpty(t, json) } diff --git a/services/webhook/webhook.go b/services/webhook/webhook.go index cc79ec15d1d7..d094a7754bac 100644 --- a/services/webhook/webhook.go +++ b/services/webhook/webhook.go @@ -5,9 +5,6 @@ package webhook import ( - "crypto/hmac" - "crypto/sha256" - "encoding/hex" "fmt" "strings" @@ -21,12 +18,12 @@ import ( ) type webhook struct { - name models.HookTaskType + name models.HookType payloadCreator func(p api.Payloader, event models.HookEventType, meta string) (api.Payloader, error) } var ( - webhooks = map[models.HookTaskType]*webhook{ + webhooks = map[models.HookType]*webhook{ models.SLACK: { name: models.SLACK, payloadCreator: GetSlackPayload, @@ -60,7 +57,7 @@ var ( // RegisterWebhook registers a webhook func RegisterWebhook(name string, webhook *webhook) { - webhooks[models.HookTaskType(name)] 
= webhook + webhooks[models.HookType(name)] = webhook } // IsValidHookTaskType returns true if a webhook registered @@ -68,7 +65,7 @@ func IsValidHookTaskType(name string) bool { if name == models.GITEA || name == models.GOGS { return true } - _, ok := webhooks[models.HookTaskType(name)] + _, ok := webhooks[models.HookType(name)] return ok } @@ -161,35 +158,14 @@ func prepareWebhook(w *models.Webhook, repo *models.Repository, event models.Hoo return fmt.Errorf("create payload for %s[%s]: %v", w.Type, event, err) } } else { - p.SetSecret(w.Secret) payloader = p } - var signature string - if len(w.Secret) > 0 { - data, err := payloader.JSONPayload() - if err != nil { - log.Error("prepareWebhooks.JSONPayload: %v", err) - } - sig := hmac.New(sha256.New, []byte(w.Secret)) - _, err = sig.Write(data) - if err != nil { - log.Error("prepareWebhooks.sigWrite: %v", err) - } - signature = hex.EncodeToString(sig.Sum(nil)) - } - if err = models.CreateHookTask(&models.HookTask{ - RepoID: repo.ID, - HookID: w.ID, - Typ: w.Type, - URL: w.URL, - Signature: signature, - Payloader: payloader, - HTTPMethod: w.HTTPMethod, - ContentType: w.ContentType, - EventType: event, - IsSSL: w.IsSSL, + RepoID: repo.ID, + HookID: w.ID, + Payloader: payloader, + EventType: event, }); err != nil { return fmt.Errorf("CreateHookTask: %v", err) } diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go index 75b9d1d1f574..16301034da15 100644 --- a/services/wiki/wiki.go +++ b/services/wiki/wiki.go @@ -81,6 +81,34 @@ func InitWiki(repo *models.Repository) error { return nil } +// prepareWikiFileName try to find a suitable file path with file name by the given raw wiki name. +// return: existence, prepared file path with name, error +func prepareWikiFileName(gitRepo *git.Repository, wikiName string) (bool, string, error) { + unescaped := wikiName + ".md" + escaped := NameToFilename(wikiName) + + // Look for both files + filesInIndex, err := gitRepo.LsFiles(unescaped, escaped) + if err != nil { + log.Error("%v", err) + return false, escaped, err + } + + foundEscaped := false + for _, filename := range filesInIndex { + switch filename { + case unescaped: + // if we find the unescaped file return it + return true, unescaped, nil + case escaped: + foundEscaped = true + } + } + + // If not return whether the escaped file exists, and the escaped filename to keep backwards compatibility. + return foundEscaped, escaped, nil +} + // updateWikiPage adds a new page to the repository wiki. func updateWikiPage(doer *models.User, repo *models.Repository, oldWikiName, newWikiName, content, message string, isNew bool) (err error) { if err = nameAllowed(newWikiName); err != nil { @@ -133,27 +161,29 @@ func updateWikiPage(doer *models.User, repo *models.Repository, oldWikiName, new } } - newWikiPath := NameToFilename(newWikiName) + isWikiExist, newWikiPath, err := prepareWikiFileName(gitRepo, newWikiName) + if err != nil { + return err + } + if isNew { - filesInIndex, err := gitRepo.LsFiles(newWikiPath) - if err != nil { - log.Error("%v", err) - return err - } - if util.IsStringInSlice(newWikiPath, filesInIndex) { + if isWikiExist { return models.ErrWikiAlreadyExist{ Title: newWikiPath, } } } else { - oldWikiPath := NameToFilename(oldWikiName) - filesInIndex, err := gitRepo.LsFiles(oldWikiPath) - if err != nil { - log.Error("%v", err) - return err + // avoid check existence again if wiki name is not changed since gitRepo.LsFiles(...) is not free. 
+ isOldWikiExist := true + oldWikiPath := newWikiPath + if oldWikiName != newWikiName { + isOldWikiExist, oldWikiPath, err = prepareWikiFileName(gitRepo, oldWikiName) + if err != nil { + return err + } } - if util.IsStringInSlice(oldWikiPath, filesInIndex) { + if isOldWikiExist { err := gitRepo.RemoveFilesFromIndex(oldWikiPath) if err != nil { log.Error("%v", err) diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go index 0e1d460a2040..b35b86d655be 100644 --- a/services/wiki/wiki_test.go +++ b/services/wiki/wiki_test.go @@ -140,7 +140,7 @@ func TestRepository_AddWikiPage(t *testing.T) { wikiPath := NameToFilename(wikiName) entry, err := masterTree.GetTreeEntryByPath(wikiPath) assert.NoError(t, err) - assert.Equal(t, wikiPath, entry.Name(), "%s not addded correctly", wikiName) + assert.Equal(t, wikiPath, entry.Name(), "%s not added correctly", wikiName) }) } @@ -162,6 +162,8 @@ func TestRepository_AddWikiPage(t *testing.T) { } func TestRepository_EditWikiPage(t *testing.T) { + assert.NoError(t, models.PrepareTestDatabase()) + const newWikiContent = "This is the new content" const commitMsg = "Commit message" repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository) diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 6f19fe5ed6bd..cd989dfbbf0a 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -25,14 +25,14 @@ environment: apps: gitea: command: gitea - plugs: [network, network-bind] + plugs: [network, network-bind, removable-media] web: command: gitea web daemon: simple - plugs: [network, network-bind] + plugs: [network, network-bind, removable-media] dump: command: gitea dump - plugs: [home] + plugs: [home, removable-media] version: command: gitea --version sqlite: diff --git a/templates/admin/auth/edit.tmpl b/templates/admin/auth/edit.tmpl index e14c3be76164..d825cd7d12de 100644 --- a/templates/admin/auth/edit.tmpl +++ b/templates/admin/auth/edit.tmpl @@ -53,7 +53,6 @@
    -

    {{.i18n.Tr "admin.auths.bind_password_helper"}}

    {{end}}
    @@ -188,6 +187,10 @@
    +
    + + +
    {{end}}
diff --git a/templates/admin/auth/new.tmpl b/templates/admin/auth/new.tmpl
index 36a5d2c632a8..302132e06b2f 100644
--- a/templates/admin/auth/new.tmpl
+++ b/templates/admin/auth/new.tmpl
@@ -38,6 +38,8 @@
    + +
diff --git a/templates/admin/auth/source/ldap.tmpl b/templates/admin/auth/source/ldap.tmpl
index 584538f53bc9..1cbcb2fd415e 100644
--- a/templates/admin/auth/source/ldap.tmpl
+++ b/templates/admin/auth/source/ldap.tmpl
@@ -28,7 +28,6 @@
    -

    {{.i18n.Tr "admin.auths.bind_password_helper"}}

diff --git a/templates/admin/config.tmpl b/templates/admin/config.tmpl
index 6979512df791..b419d04a1b24 100644
--- a/templates/admin/config.tmpl
+++ b/templates/admin/config.tmpl
@@ -149,6 +149,8 @@
    {{if .Service.RegisterEmailConfirm}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}
    {{.i18n.Tr "admin.config.disable_register"}}
    {{if .Service.DisableRegistration}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}
    +
    {{.i18n.Tr "admin.config.allow_only_internal_registration"}}
    +
    {{if .Service.AllowOnlyInternalRegistration}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}
    {{.i18n.Tr "admin.config.allow_only_external_registration"}}
    {{if .Service.AllowOnlyExternalRegistration}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}
    {{.i18n.Tr "admin.config.show_registration_button"}}
diff --git a/templates/admin/hook_new.tmpl b/templates/admin/hook_new.tmpl
index 9b251ec4cbc1..01e9e429a6c0 100644
--- a/templates/admin/hook_new.tmpl
+++ b/templates/admin/hook_new.tmpl
@@ -15,23 +15,23 @@
    {{end}}
    {{if eq .HookType "gitea"}} - + {{else if eq .HookType "gogs"}} - + {{else if eq .HookType "slack"}} - + {{else if eq .HookType "discord"}} - + {{else if eq .HookType "dingtalk"}} - + {{else if eq .HookType "telegram"}} - + {{else if eq .HookType "msteams"}} - + {{else if eq .HookType "feishu"}} - + {{else if eq .HookType "matrix"}} - + {{end}}
diff --git a/templates/admin/user/edit.tmpl b/templates/admin/user/edit.tmpl
index af01489c0af2..5e5bc75c9695 100644
--- a/templates/admin/user/edit.tmpl
+++ b/templates/admin/user/edit.tmpl
@@ -28,6 +28,33 @@
    + +
    + + +
    +
diff --git a/templates/admin/user/new.tmpl b/templates/admin/user/new.tmpl
index 885045dd0270..a433c5a7cc86 100644
--- a/templates/admin/user/new.tmpl
+++ b/templates/admin/user/new.tmpl
@@ -24,6 +24,31 @@
    + +
    + + +
    +
diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl
index bc45315ef9d8..25e163b19b9d 100644
--- a/templates/base/footer.tmpl
+++ b/templates/base/footer.tmpl
@@ -12,17 +12,17 @@
    {{template "base/footer_content" .}}
    {{if .RequireSimpleMDE}}
-
-
-
+
+
+
    {{end}}
    {{if .RequireU2F}}
-
+
    {{end}}
    {{if .EnableCaptcha}}
    {{if eq .CaptchaType "recaptcha"}}
@@ -32,7 +32,7 @@
    {{end}}
    {{end}}
-
+
    {{template "custom/footer" .}}
diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl
index f0d8a6f5a450..e30fe7076863 100644
--- a/templates/base/footer_content.tmpl
+++ b/templates/base/footer_content.tmpl
@@ -16,7 +16,7 @@
    {{end}}
    - {{.i18n.Tr "licenses"}} + {{.i18n.Tr "licenses"}} {{if .EnableSwagger}}API{{end}} {{.i18n.Tr "website"}} {{template "custom/extra_links_footer" .}} diff --git a/templates/base/head.tmpl b/templates/base/head.tmpl index d4de8f7905bc..5091eda1e996 100644 --- a/templates/base/head.tmpl +++ b/templates/base/head.tmpl @@ -29,7 +29,8 @@ window.config = { AppVer: '{{AppVer}}', AppSubUrl: '{{AppSubUrl}}', - StaticUrlPrefix: '{{StaticUrlPrefix}}', + AssetUrlPrefix: '{{AssetUrlPrefix}}', + CustomEmojis: {{CustomEmojis}}, UseServiceWorker: {{UseServiceWorker}}, csrf: '{{.CsrfToken}}', HighlightJS: {{if .RequireHighlightJS}}true{{else}}false{{end}}, @@ -61,12 +62,12 @@ {{end}} }; - - + + {{if .RequireSimpleMDE}} - + {{end}} - +