Diffstat (limited to 'models')
-rw-r--r--  models/actions/artifact.go | 19
-rw-r--r--  models/actions/run.go | 152
-rw-r--r--  models/actions/run_job.go | 33
-rw-r--r--  models/actions/run_job_list.go | 60
-rw-r--r--  models/actions/run_job_status_test.go | 8
-rw-r--r--  models/actions/run_list.go | 33
-rw-r--r--  models/actions/runner.go | 43
-rw-r--r--  models/actions/runner_token_test.go | 6
-rw-r--r--  models/actions/schedule.go | 89
-rw-r--r--  models/actions/status.go | 9
-rw-r--r--  models/actions/task.go | 131
-rw-r--r--  models/actions/task_list.go | 4
-rw-r--r--  models/actions/tasks_version.go | 44
-rw-r--r--  models/actions/utils.go | 19
-rw-r--r--  models/actions/variable.go | 31
-rw-r--r--  models/activities/action.go | 18
-rw-r--r--  models/activities/action_list.go | 27
-rw-r--r--  models/activities/action_test.go | 2
-rw-r--r--  models/activities/notification_list.go | 34
-rw-r--r--  models/activities/notification_test.go | 12
-rw-r--r--  models/activities/repo_activity.go | 5
-rw-r--r--  models/activities/statistic.go | 21
-rw-r--r--  models/activities/user_heatmap.go | 2
-rw-r--r--  models/asymkey/error.go | 2
-rw-r--r--  models/asymkey/gpg_key.go | 21
-rw-r--r--  models/asymkey/gpg_key_add.go | 2
-rw-r--r--  models/asymkey/gpg_key_commit_verification.go | 26
-rw-r--r--  models/asymkey/gpg_key_common.go | 9
-rw-r--r--  models/asymkey/gpg_key_verify.go | 111
-rw-r--r--  models/asymkey/ssh_key.go | 103
-rw-r--r--  models/asymkey/ssh_key_commit_verification.go | 80
-rw-r--r--  models/asymkey/ssh_key_deploy.go | 56
-rw-r--r--  models/asymkey/ssh_key_fingerprint.go | 48
-rw-r--r--  models/asymkey/ssh_key_parse.go | 76
-rw-r--r--  models/asymkey/ssh_key_test.go | 32
-rw-r--r--  models/asymkey/ssh_key_verify.go | 60
-rw-r--r--  models/auth/access_token_scope.go | 11
-rw-r--r--  models/auth/access_token_scope_test.go | 26
-rw-r--r--  models/auth/auth_token.go | 2
-rw-r--r--  models/auth/oauth2.go | 88
-rw-r--r--  models/auth/oauth2_test.go | 2
-rw-r--r--  models/auth/session.go | 84
-rw-r--r--  models/auth/source.go | 45
-rw-r--r--  models/auth/source_test.go | 2
-rw-r--r--  models/auth/twofactor.go | 10
-rw-r--r--  models/db/context.go | 13
-rw-r--r--  models/db/context_test.go | 2
-rwxr-xr-x  models/db/engine.go | 2
-rw-r--r--  models/db/engine_init.go | 5
-rw-r--r--  models/db/engine_test.go | 2
-rw-r--r--  models/db/error.go | 2
-rw-r--r--  models/db/list_test.go | 2
-rw-r--r--  models/db/name.go | 7
-rw-r--r--  models/db/search.go | 4
-rw-r--r--  models/db/sql_postgres_with_schema.go | 4
-rw-r--r--  models/dbfs/dbfile.go | 20
-rw-r--r--  models/dbfs/dbfs_test.go | 24
-rw-r--r--  models/fixtures/action_artifact.yml | 54
-rw-r--r--  models/fixtures/action_run.yml | 67
-rw-r--r--  models/fixtures/action_run_job.yml | 60
-rw-r--r--  models/fixtures/action_runner.yml | 51
-rw-r--r--  models/fixtures/action_task.yml | 60
-rw-r--r--  models/fixtures/branch.yml | 120
-rw-r--r--  models/fixtures/commit_status.yml | 5
-rw-r--r--  models/fixtures/email_address.yml | 2
-rw-r--r--  models/fixtures/hook_task.yml | 2
-rw-r--r--  models/fixtures/public_key.yml | 1
-rw-r--r--  models/fixtures/repo_transfer.yml | 8
-rw-r--r--  models/fixtures/webhook.yml | 12
-rw-r--r--  models/git/branch.go | 21
-rw-r--r--  models/git/branch_test.go | 2
-rw-r--r--  models/git/commit_status.go | 136
-rw-r--r--  models/git/commit_status_summary.go | 20
-rw-r--r--  models/git/commit_status_test.go | 93
-rw-r--r--  models/git/lfs.go | 13
-rw-r--r--  models/git/lfs_lock.go | 76
-rw-r--r--  models/git/protected_branch.go | 4
-rw-r--r--  models/git/protected_branch_list_test.go | 2
-rw-r--r--  models/git/protected_branch_test.go | 2
-rw-r--r--  models/issues/comment.go | 231
-rw-r--r--  models/issues/comment_code.go | 5
-rw-r--r--  models/issues/comment_list.go | 35
-rw-r--r--  models/issues/comment_test.go | 18
-rw-r--r--  models/issues/dependency.go | 123
-rw-r--r--  models/issues/issue.go | 21
-rw-r--r--  models/issues/issue_index.go | 22
-rw-r--r--  models/issues/issue_label.go | 207
-rw-r--r--  models/issues/issue_list.go | 45
-rw-r--r--  models/issues/issue_list_test.go | 18
-rw-r--r--  models/issues/issue_lock.go | 43
-rw-r--r--  models/issues/issue_search.go | 48
-rw-r--r--  models/issues/issue_stats.go | 5
-rw-r--r--  models/issues/issue_test.go | 40
-rw-r--r--  models/issues/issue_update.go | 397
-rw-r--r--  models/issues/issue_xref.go | 2
-rw-r--r--  models/issues/label.go | 74
-rw-r--r--  models/issues/label_test.go | 28
-rw-r--r--  models/issues/milestone.go | 176
-rw-r--r--  models/issues/milestone_test.go | 6
-rw-r--r--  models/issues/pull.go | 141
-rw-r--r--  models/issues/pull_list.go | 3
-rw-r--r--  models/issues/pull_list_test.go | 2
-rw-r--r--  models/issues/pull_test.go | 59
-rw-r--r--  models/issues/reaction.go | 18
-rw-r--r--  models/issues/review.go | 575
-rw-r--r--  models/issues/review_list.go | 2
-rw-r--r--  models/issues/stopwatch.go | 167
-rw-r--r--  models/issues/stopwatch_test.go | 61
-rw-r--r--  models/issues/tracked_time.go | 151
-rw-r--r--  models/migrations/base/db.go | 16
-rw-r--r--  models/migrations/base/tests.go | 14
-rw-r--r--  models/migrations/migrations.go | 13
-rw-r--r--  models/migrations/migrations_test.go | 6
-rw-r--r--  models/migrations/v1_10/v100.go | 2
-rw-r--r--  models/migrations/v1_10/v101.go | 2
-rw-r--r--  models/migrations/v1_10/v88.go | 2
-rw-r--r--  models/migrations/v1_10/v89.go | 2
-rw-r--r--  models/migrations/v1_10/v90.go | 2
-rw-r--r--  models/migrations/v1_10/v91.go | 2
-rw-r--r--  models/migrations/v1_10/v92.go | 2
-rw-r--r--  models/migrations/v1_10/v93.go | 2
-rw-r--r--  models/migrations/v1_10/v94.go | 2
-rw-r--r--  models/migrations/v1_10/v95.go | 2
-rw-r--r--  models/migrations/v1_10/v96.go | 2
-rw-r--r--  models/migrations/v1_10/v97.go | 2
-rw-r--r--  models/migrations/v1_10/v98.go | 2
-rw-r--r--  models/migrations/v1_10/v99.go | 2
-rw-r--r--  models/migrations/v1_11/v102.go | 2
-rw-r--r--  models/migrations/v1_11/v103.go | 2
-rw-r--r--  models/migrations/v1_11/v104.go | 2
-rw-r--r--  models/migrations/v1_11/v105.go | 2
-rw-r--r--  models/migrations/v1_11/v106.go | 2
-rw-r--r--  models/migrations/v1_11/v107.go | 2
-rw-r--r--  models/migrations/v1_11/v108.go | 2
-rw-r--r--  models/migrations/v1_11/v109.go | 2
-rw-r--r--  models/migrations/v1_11/v110.go | 2
-rw-r--r--  models/migrations/v1_11/v111.go | 9
-rw-r--r--  models/migrations/v1_11/v112.go | 6
-rw-r--r--  models/migrations/v1_11/v113.go | 2
-rw-r--r--  models/migrations/v1_11/v114.go | 2
-rw-r--r--  models/migrations/v1_11/v115.go | 4
-rw-r--r--  models/migrations/v1_11/v116.go | 2
-rw-r--r--  models/migrations/v1_12/v117.go | 2
-rw-r--r--  models/migrations/v1_12/v118.go | 2
-rw-r--r--  models/migrations/v1_12/v119.go | 2
-rw-r--r--  models/migrations/v1_12/v120.go | 2
-rw-r--r--  models/migrations/v1_12/v121.go | 2
-rw-r--r--  models/migrations/v1_12/v122.go | 2
-rw-r--r--  models/migrations/v1_12/v123.go | 2
-rw-r--r--  models/migrations/v1_12/v124.go | 2
-rw-r--r--  models/migrations/v1_12/v125.go | 2
-rw-r--r--  models/migrations/v1_12/v126.go | 2
-rw-r--r--  models/migrations/v1_12/v127.go | 2
-rw-r--r--  models/migrations/v1_12/v128.go | 2
-rw-r--r--  models/migrations/v1_12/v129.go | 2
-rw-r--r--  models/migrations/v1_12/v130.go | 2
-rw-r--r--  models/migrations/v1_12/v131.go | 2
-rw-r--r--  models/migrations/v1_12/v132.go | 2
-rw-r--r--  models/migrations/v1_12/v133.go | 2
-rw-r--r--  models/migrations/v1_12/v134.go | 2
-rw-r--r--  models/migrations/v1_12/v135.go | 2
-rw-r--r--  models/migrations/v1_12/v136.go | 2
-rw-r--r--  models/migrations/v1_12/v137.go | 2
-rw-r--r--  models/migrations/v1_12/v138.go | 2
-rw-r--r--  models/migrations/v1_12/v139.go | 2
-rw-r--r--  models/migrations/v1_13/v140.go | 11
-rw-r--r--  models/migrations/v1_13/v141.go | 2
-rw-r--r--  models/migrations/v1_13/v142.go | 2
-rw-r--r--  models/migrations/v1_13/v143.go | 2
-rw-r--r--  models/migrations/v1_13/v144.go | 2
-rw-r--r--  models/migrations/v1_13/v145.go | 8
-rw-r--r--  models/migrations/v1_13/v146.go | 2
-rw-r--r--  models/migrations/v1_13/v147.go | 2
-rw-r--r--  models/migrations/v1_13/v148.go | 2
-rw-r--r--  models/migrations/v1_13/v149.go | 2
-rw-r--r--  models/migrations/v1_13/v150.go | 2
-rw-r--r--  models/migrations/v1_13/v151.go | 5
-rw-r--r--  models/migrations/v1_13/v152.go | 2
-rw-r--r--  models/migrations/v1_13/v153.go | 2
-rw-r--r--  models/migrations/v1_13/v154.go | 2
-rw-r--r--  models/migrations/v1_14/main_test.go | 2
-rw-r--r--  models/migrations/v1_14/v155.go | 2
-rw-r--r--  models/migrations/v1_14/v156.go | 2
-rw-r--r--  models/migrations/v1_14/v157.go | 13
-rw-r--r--  models/migrations/v1_14/v158.go | 6
-rw-r--r--  models/migrations/v1_14/v159.go | 2
-rw-r--r--  models/migrations/v1_14/v160.go | 2
-rw-r--r--  models/migrations/v1_14/v161.go | 2
-rw-r--r--  models/migrations/v1_14/v162.go | 2
-rw-r--r--  models/migrations/v1_14/v163.go | 2
-rw-r--r--  models/migrations/v1_14/v164.go | 2
-rw-r--r--  models/migrations/v1_14/v165.go | 12
-rw-r--r--  models/migrations/v1_14/v166.go | 2
-rw-r--r--  models/migrations/v1_14/v167.go | 2
-rw-r--r--  models/migrations/v1_14/v168.go | 2
-rw-r--r--  models/migrations/v1_14/v169.go | 2
-rw-r--r--  models/migrations/v1_14/v170.go | 2
-rw-r--r--  models/migrations/v1_14/v171.go | 2
-rw-r--r--  models/migrations/v1_14/v172.go | 2
-rw-r--r--  models/migrations/v1_14/v173.go | 2
-rw-r--r--  models/migrations/v1_14/v174.go | 2
-rw-r--r--  models/migrations/v1_14/v175.go | 2
-rw-r--r--  models/migrations/v1_14/v176.go | 2
-rw-r--r--  models/migrations/v1_14/v176_test.go | 2
-rw-r--r--  models/migrations/v1_14/v177.go | 2
-rw-r--r--  models/migrations/v1_14/v177_test.go | 2
-rw-r--r--  models/migrations/v1_15/main_test.go | 2
-rw-r--r--  models/migrations/v1_15/v178.go | 2
-rw-r--r--  models/migrations/v1_15/v179.go | 2
-rw-r--r--  models/migrations/v1_15/v180.go | 2
-rw-r--r--  models/migrations/v1_15/v181.go | 2
-rw-r--r--  models/migrations/v1_15/v181_test.go | 6
-rw-r--r--  models/migrations/v1_15/v182.go | 2
-rw-r--r--  models/migrations/v1_15/v182_test.go | 2
-rw-r--r--  models/migrations/v1_15/v183.go | 2
-rw-r--r--  models/migrations/v1_15/v184.go | 2
-rw-r--r--  models/migrations/v1_15/v185.go | 2
-rw-r--r--  models/migrations/v1_15/v186.go | 2
-rw-r--r--  models/migrations/v1_15/v187.go | 2
-rw-r--r--  models/migrations/v1_15/v188.go | 2
-rw-r--r--  models/migrations/v1_16/main_test.go | 2
-rw-r--r--  models/migrations/v1_16/v189.go | 2
-rw-r--r--  models/migrations/v1_16/v189_test.go | 6
-rw-r--r--  models/migrations/v1_16/v190.go | 2
-rw-r--r--  models/migrations/v1_16/v191.go | 2
-rw-r--r--  models/migrations/v1_16/v192.go | 2
-rw-r--r--  models/migrations/v1_16/v193.go | 2
-rw-r--r--  models/migrations/v1_16/v193_test.go | 6
-rw-r--r--  models/migrations/v1_16/v194.go | 2
-rw-r--r--  models/migrations/v1_16/v195.go | 2
-rw-r--r--  models/migrations/v1_16/v195_test.go | 2
-rw-r--r--  models/migrations/v1_16/v196.go | 2
-rw-r--r--  models/migrations/v1_16/v197.go | 2
-rw-r--r--  models/migrations/v1_16/v198.go | 2
-rw-r--r--  models/migrations/v1_16/v199.go | 2
-rw-r--r--  models/migrations/v1_16/v200.go | 2
-rw-r--r--  models/migrations/v1_16/v201.go | 2
-rw-r--r--  models/migrations/v1_16/v202.go | 2
-rw-r--r--  models/migrations/v1_16/v203.go | 2
-rw-r--r--  models/migrations/v1_16/v204.go | 2
-rw-r--r--  models/migrations/v1_16/v205.go | 2
-rw-r--r--  models/migrations/v1_16/v206.go | 2
-rw-r--r--  models/migrations/v1_16/v207.go | 2
-rw-r--r--  models/migrations/v1_16/v208.go | 2
-rw-r--r--  models/migrations/v1_16/v209.go | 2
-rw-r--r--  models/migrations/v1_16/v210.go | 2
-rw-r--r--  models/migrations/v1_16/v210_test.go | 4
-rw-r--r--  models/migrations/v1_17/main_test.go | 2
-rw-r--r--  models/migrations/v1_17/v211.go | 2
-rw-r--r--  models/migrations/v1_17/v212.go | 2
-rw-r--r--  models/migrations/v1_17/v213.go | 2
-rw-r--r--  models/migrations/v1_17/v214.go | 2
-rw-r--r--  models/migrations/v1_17/v215.go | 2
-rw-r--r--  models/migrations/v1_17/v216.go | 2
-rw-r--r--  models/migrations/v1_17/v217.go | 2
-rw-r--r--  models/migrations/v1_17/v218.go | 2
-rw-r--r--  models/migrations/v1_17/v219.go | 2
-rw-r--r--  models/migrations/v1_17/v220.go | 2
-rw-r--r--  models/migrations/v1_17/v221.go | 2
-rw-r--r--  models/migrations/v1_17/v221_test.go | 2
-rw-r--r--  models/migrations/v1_17/v222.go | 5
-rw-r--r--  models/migrations/v1_17/v223.go | 2
-rw-r--r--  models/migrations/v1_18/main_test.go | 2
-rw-r--r--  models/migrations/v1_18/v224.go | 2
-rw-r--r--  models/migrations/v1_18/v225.go | 2
-rw-r--r--  models/migrations/v1_18/v226.go | 2
-rw-r--r--  models/migrations/v1_18/v227.go | 2
-rw-r--r--  models/migrations/v1_18/v228.go | 2
-rw-r--r--  models/migrations/v1_18/v229.go | 2
-rw-r--r--  models/migrations/v1_18/v229_test.go | 2
-rw-r--r--  models/migrations/v1_18/v230.go | 2
-rw-r--r--  models/migrations/v1_18/v230_test.go | 2
-rw-r--r--  models/migrations/v1_19/main_test.go | 2
-rw-r--r--  models/migrations/v1_19/v231.go | 2
-rw-r--r--  models/migrations/v1_19/v232.go | 2
-rw-r--r--  models/migrations/v1_19/v233.go | 2
-rw-r--r--  models/migrations/v1_19/v233_test.go | 4
-rw-r--r--  models/migrations/v1_19/v234.go | 2
-rw-r--r--  models/migrations/v1_19/v235.go | 2
-rw-r--r--  models/migrations/v1_19/v236.go | 2
-rw-r--r--  models/migrations/v1_19/v237.go | 2
-rw-r--r--  models/migrations/v1_19/v238.go | 2
-rw-r--r--  models/migrations/v1_19/v239.go | 2
-rw-r--r--  models/migrations/v1_19/v240.go | 2
-rw-r--r--  models/migrations/v1_19/v241.go | 2
-rw-r--r--  models/migrations/v1_19/v242.go | 2
-rw-r--r--  models/migrations/v1_19/v243.go | 2
-rw-r--r--  models/migrations/v1_20/main_test.go | 2
-rw-r--r--  models/migrations/v1_20/v244.go | 2
-rw-r--r--  models/migrations/v1_20/v245.go | 5
-rw-r--r--  models/migrations/v1_20/v246.go | 2
-rw-r--r--  models/migrations/v1_20/v247.go | 2
-rw-r--r--  models/migrations/v1_20/v248.go | 2
-rw-r--r--  models/migrations/v1_20/v249.go | 2
-rw-r--r--  models/migrations/v1_20/v250.go | 2
-rw-r--r--  models/migrations/v1_20/v251.go | 2
-rw-r--r--  models/migrations/v1_20/v252.go | 2
-rw-r--r--  models/migrations/v1_20/v253.go | 2
-rw-r--r--  models/migrations/v1_20/v254.go | 2
-rw-r--r--  models/migrations/v1_20/v255.go | 2
-rw-r--r--  models/migrations/v1_20/v256.go | 2
-rw-r--r--  models/migrations/v1_20/v257.go | 2
-rw-r--r--  models/migrations/v1_20/v258.go | 2
-rw-r--r--  models/migrations/v1_20/v259.go | 4
-rw-r--r--  models/migrations/v1_20/v259_test.go | 4
-rw-r--r--  models/migrations/v1_21/main_test.go | 2
-rw-r--r--  models/migrations/v1_21/v260.go | 2
-rw-r--r--  models/migrations/v1_21/v261.go | 2
-rw-r--r--  models/migrations/v1_21/v262.go | 2
-rw-r--r--  models/migrations/v1_21/v263.go | 2
-rw-r--r--  models/migrations/v1_21/v264.go | 6
-rw-r--r--  models/migrations/v1_21/v265.go | 2
-rw-r--r--  models/migrations/v1_21/v266.go | 2
-rw-r--r--  models/migrations/v1_21/v267.go | 2
-rw-r--r--  models/migrations/v1_21/v268.go | 2
-rw-r--r--  models/migrations/v1_21/v269.go | 2
-rw-r--r--  models/migrations/v1_21/v270.go | 2
-rw-r--r--  models/migrations/v1_21/v271.go | 3
-rw-r--r--  models/migrations/v1_21/v272.go | 3
-rw-r--r--  models/migrations/v1_21/v273.go | 3
-rw-r--r--  models/migrations/v1_21/v274.go | 3
-rw-r--r--  models/migrations/v1_21/v275.go | 2
-rw-r--r--  models/migrations/v1_21/v276.go | 2
-rw-r--r--  models/migrations/v1_21/v277.go | 2
-rw-r--r--  models/migrations/v1_21/v278.go | 2
-rw-r--r--  models/migrations/v1_21/v279.go | 2
-rw-r--r--  models/migrations/v1_22/main_test.go | 2
-rw-r--r--  models/migrations/v1_22/v280.go | 2
-rw-r--r--  models/migrations/v1_22/v281.go | 2
-rw-r--r--  models/migrations/v1_22/v282.go | 2
-rw-r--r--  models/migrations/v1_22/v283.go | 2
-rw-r--r--  models/migrations/v1_22/v283_test.go | 2
-rw-r--r--  models/migrations/v1_22/v284.go | 3
-rw-r--r--  models/migrations/v1_22/v285.go | 2
-rw-r--r--  models/migrations/v1_22/v286.go | 2
-rw-r--r--  models/migrations/v1_22/v286_test.go | 6
-rw-r--r--  models/migrations/v1_22/v287.go | 2
-rw-r--r--  models/migrations/v1_22/v287_test.go | 6
-rw-r--r--  models/migrations/v1_22/v288.go | 2
-rw-r--r--  models/migrations/v1_22/v289.go | 2
-rw-r--r--  models/migrations/v1_22/v290.go | 2
-rw-r--r--  models/migrations/v1_22/v291.go | 2
-rw-r--r--  models/migrations/v1_22/v292.go | 2
-rw-r--r--  models/migrations/v1_22/v293.go | 2
-rw-r--r--  models/migrations/v1_22/v293_test.go | 2
-rw-r--r--  models/migrations/v1_22/v294.go | 2
-rw-r--r--  models/migrations/v1_22/v294_test.go | 5
-rw-r--r--  models/migrations/v1_22/v295.go | 2
-rw-r--r--  models/migrations/v1_22/v296.go | 2
-rw-r--r--  models/migrations/v1_22/v297.go | 2
-rw-r--r--  models/migrations/v1_22/v298.go | 2
-rw-r--r--  models/migrations/v1_23/main_test.go | 2
-rw-r--r--  models/migrations/v1_23/v299.go | 8
-rw-r--r--  models/migrations/v1_23/v300.go | 8
-rw-r--r--  models/migrations/v1_23/v301.go | 8
-rw-r--r--  models/migrations/v1_23/v302.go | 7
-rw-r--r--  models/migrations/v1_23/v302_test.go | 51
-rw-r--r--  models/migrations/v1_23/v303.go | 8
-rw-r--r--  models/migrations/v1_23/v304.go | 7
-rw-r--r--  models/migrations/v1_23/v304_test.go | 40
-rw-r--r--  models/migrations/v1_23/v305.go | 2
-rw-r--r--  models/migrations/v1_23/v306.go | 8
-rw-r--r--  models/migrations/v1_23/v307.go | 2
-rw-r--r--  models/migrations/v1_23/v308.go | 2
-rw-r--r--  models/migrations/v1_23/v309.go | 2
-rw-r--r--  models/migrations/v1_23/v310.go | 8
-rw-r--r--  models/migrations/v1_23/v311.go | 9
-rw-r--r--  models/migrations/v1_24/v312.go | 8
-rw-r--r--  models/migrations/v1_24/v313.go | 2
-rw-r--r--  models/migrations/v1_24/v314.go | 2
-rw-r--r--  models/migrations/v1_24/v315.go | 19
-rw-r--r--  models/migrations/v1_24/v316.go | 24
-rw-r--r--  models/migrations/v1_24/v317.go | 56
-rw-r--r--  models/migrations/v1_24/v318.go | 21
-rw-r--r--  models/migrations/v1_24/v319.go | 19
-rw-r--r--  models/migrations/v1_24/v320.go | 57
-rw-r--r--  models/migrations/v1_25/main_test.go | 14
-rw-r--r--  models/migrations/v1_25/v321.go | 52
-rw-r--r--  models/migrations/v1_25/v321_test.go | 70
-rw-r--r--  models/migrations/v1_6/v70.go | 2
-rw-r--r--  models/migrations/v1_6/v71.go | 2
-rw-r--r--  models/migrations/v1_6/v72.go | 2
-rw-r--r--  models/migrations/v1_7/v73.go | 2
-rw-r--r--  models/migrations/v1_7/v74.go | 2
-rw-r--r--  models/migrations/v1_7/v75.go | 2
-rw-r--r--  models/migrations/v1_8/v76.go | 2
-rw-r--r--  models/migrations/v1_8/v77.go | 2
-rw-r--r--  models/migrations/v1_8/v78.go | 2
-rw-r--r--  models/migrations/v1_8/v79.go | 2
-rw-r--r--  models/migrations/v1_8/v80.go | 2
-rw-r--r--  models/migrations/v1_8/v81.go | 2
-rw-r--r--  models/migrations/v1_9/v82.go | 2
-rw-r--r--  models/migrations/v1_9/v83.go | 2
-rw-r--r--  models/migrations/v1_9/v84.go | 2
-rw-r--r--  models/migrations/v1_9/v85.go | 2
-rw-r--r--  models/migrations/v1_9/v86.go | 2
-rw-r--r--  models/migrations/v1_9/v87.go | 2
-rw-r--r--  models/organization/org.go | 169
-rw-r--r--  models/organization/org_list.go | 21
-rw-r--r--  models/organization/org_list_test.go | 43
-rw-r--r--  models/organization/org_test.go | 33
-rw-r--r--  models/organization/org_user.go | 2
-rw-r--r--  models/organization/org_user_test.go | 2
-rw-r--r--  models/organization/team.go | 24
-rw-r--r--  models/organization/team_repo.go | 33
-rw-r--r--  models/organization/team_repo_test.go | 2
-rw-r--r--  models/organization/team_test.go | 6
-rw-r--r--  models/organization/team_unit.go | 23
-rw-r--r--  models/packages/container/const.go | 9
-rw-r--r--  models/packages/container/search.go | 12
-rw-r--r--  models/packages/descriptor.go | 37
-rw-r--r--  models/packages/nuget/search.go | 2
-rw-r--r--  models/packages/package.go | 4
-rw-r--r--  models/packages/package_file.go | 5
-rw-r--r--  models/packages/package_property.go | 20
-rw-r--r--  models/packages/package_version.go | 46
-rw-r--r--  models/perm/access/repo_permission.go | 109
-rw-r--r--  models/perm/access/repo_permission_test.go | 68
-rw-r--r--  models/project/column.go | 4
-rw-r--r--  models/project/column_test.go | 8
-rw-r--r--  models/project/issue.go | 4
-rw-r--r--  models/project/project.go | 46
-rw-r--r--  models/project/project_test.go | 4
-rw-r--r--  models/pull/automerge.go | 7
-rw-r--r--  models/pull/review_state.go | 5
-rw-r--r--  models/renderhelper/commit_checker.go | 2
-rw-r--r--  models/renderhelper/repo_comment.go | 43
-rw-r--r--  models/renderhelper/repo_comment_test.go | 7
-rw-r--r--  models/renderhelper/repo_file.go | 18
-rw-r--r--  models/renderhelper/repo_file_test.go | 10
-rw-r--r--  models/renderhelper/repo_wiki.go | 19
-rw-r--r--  models/renderhelper/repo_wiki_test.go | 6
-rw-r--r--  models/renderhelper/simple_document.go | 10
-rw-r--r--  models/renderhelper/simple_document_test.go | 2
-rw-r--r--  models/repo.go | 19
-rw-r--r--  models/repo/attachment.go | 2
-rw-r--r--  models/repo/avatar.go | 3
-rw-r--r--  models/repo/collaboration_test.go | 10
-rw-r--r--  models/repo/language_stats.go | 148
-rw-r--r--  models/repo/org_repo.go | 31
-rw-r--r--  models/repo/pushmirror_test.go | 2
-rw-r--r--  models/repo/release.go | 102
-rw-r--r--  models/repo/repo.go | 100
-rw-r--r--  models/repo/repo_list.go | 40
-rw-r--r--  models/repo/repo_list_test.go | 86
-rw-r--r--  models/repo/repo_test.go | 21
-rw-r--r--  models/repo/repo_unit.go | 28
-rw-r--r--  models/repo/repo_unit_test.go | 10
-rw-r--r--  models/repo/star.go | 77
-rw-r--r--  models/repo/topic.go | 150
-rw-r--r--  models/repo/topic_test.go | 2
-rw-r--r--  models/repo/transfer.go | 4
-rw-r--r--  models/repo/update.go | 37
-rw-r--r--  models/repo/upload.go | 26
-rw-r--r--  models/repo/watch_test.go | 2
-rw-r--r--  models/repo/wiki.go | 4
-rw-r--r--  models/repo_test.go | 4
-rw-r--r--  models/secret/secret.go | 37
-rw-r--r--  models/system/notice.go | 2
-rw-r--r--  models/system/setting_test.go | 12
-rw-r--r--  models/unit/unit.go | 58
-rw-r--r--  models/unittest/consistency.go | 17
-rw-r--r--  models/unittest/fixtures_loader.go | 84
-rw-r--r--  models/unittest/fscopy.go | 2
-rw-r--r--  models/unittest/testdb.go | 22
-rw-r--r--  models/unittest/unit_tests.go | 4
-rw-r--r--  models/user/avatar.go | 7
-rw-r--r--  models/user/badge.go | 2
-rw-r--r--  models/user/email_address.go | 55
-rw-r--r--  models/user/email_address_test.go | 10
-rw-r--r--  models/user/follow.go | 60
-rw-r--r--  models/user/search.go | 9
-rw-r--r--  models/user/setting.go | 5
-rw-r--r--  models/user/setting_options.go (renamed from models/user/setting_keys.go) | 8
-rw-r--r--  models/user/setting_test.go | 8
-rw-r--r--  models/user/user.go | 102
-rw-r--r--  models/user/user_list.go | 5
-rw-r--r--  models/user/user_system.go | 10
-rw-r--r--  models/user/user_test.go | 144
-rw-r--r--  models/webhook/hooktask.go | 5
-rw-r--r--  models/webhook/webhook.go | 27
-rw-r--r--  models/webhook/webhook_test.go | 6
482 files changed, 4676 insertions, 4296 deletions
diff --git a/models/actions/artifact.go b/models/actions/artifact.go
index 524224f070..757bd13acd 100644
--- a/models/actions/artifact.go
+++ b/models/actions/artifact.go
@@ -30,6 +30,25 @@ const (
ArtifactStatusDeleted // 6, ArtifactStatusDeleted is the status of an artifact that is deleted
)
+func (status ArtifactStatus) ToString() string {
+ switch status {
+ case ArtifactStatusUploadPending:
+ return "upload is not yet completed"
+ case ArtifactStatusUploadConfirmed:
+ return "upload is completed"
+ case ArtifactStatusUploadError:
+ return "upload failed"
+ case ArtifactStatusExpired:
+ return "expired"
+ case ArtifactStatusPendingDeletion:
+ return "pending deletion"
+ case ArtifactStatusDeleted:
+ return "deleted"
+ default:
+ return "unknown"
+ }
+}
+
func init() {
db.RegisterModel(new(ActionArtifact))
}
diff --git a/models/actions/run.go b/models/actions/run.go
index 89f7f3e640..f5ccba06c2 100644
--- a/models/actions/run.go
+++ b/models/actions/run.go
@@ -5,6 +5,7 @@ package actions
import (
"context"
+ "errors"
"fmt"
"slices"
"strings"
@@ -15,6 +16,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
@@ -164,12 +166,24 @@ func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, err
return nil, fmt.Errorf("event %s is not a pull request event", run.Event)
}
+func (run *ActionRun) GetWorkflowRunEventPayload() (*api.WorkflowRunPayload, error) {
+ if run.Event == webhook_module.HookEventWorkflowRun {
+ var payload api.WorkflowRunPayload
+ if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil {
+ return nil, err
+ }
+ return &payload, nil
+ }
+ return nil, fmt.Errorf("event %s is not a workflow run event", run.Event)
+}
+
func (run *ActionRun) IsSchedule() bool {
return run.ScheduleID > 0
}
func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) error {
_, err := db.GetEngine(ctx).ID(repo.ID).
+ NoAutoTime().
SetExpr("num_action_runs",
builder.Select("count(*)").From("action_run").
Where(builder.Eq{"repo_id": repo.ID}),
@@ -245,7 +259,7 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
// If the update affected 0 rows, it means the job has changed in the meantime, so we need to try again.
if n == 0 {
- return cancelledJobs, fmt.Errorf("job has changed, try again")
+ return cancelledJobs, errors.New("job has changed, try again")
}
cancelledJobs = append(cancelledJobs, job)
@@ -268,86 +282,81 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
// InsertRun inserts a run
// The title will be cut off at 255 characters if it's longer than 255 characters.
func InsertRun(ctx context.Context, run *ActionRun, jobs []*jobparser.SingleWorkflow) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- index, err := db.GetNextResourceIndex(ctx, "action_run_index", run.RepoID)
- if err != nil {
- return err
- }
- run.Index = index
- run.Title = util.EllipsisDisplayString(run.Title, 255)
-
- if err := db.Insert(ctx, run); err != nil {
- return err
- }
-
- if run.Repo == nil {
- repo, err := repo_model.GetRepositoryByID(ctx, run.RepoID)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ index, err := db.GetNextResourceIndex(ctx, "action_run_index", run.RepoID)
if err != nil {
return err
}
- run.Repo = repo
- }
+ run.Index = index
+ run.Title = util.EllipsisDisplayString(run.Title, 255)
- if err := updateRepoRunsNumbers(ctx, run.Repo); err != nil {
- return err
- }
-
- runJobs := make([]*ActionRunJob, 0, len(jobs))
- var hasWaiting bool
- for _, v := range jobs {
- id, job := v.Job()
- needs := job.Needs()
- if err := v.SetJob(id, job.EraseNeeds()); err != nil {
+ if err := db.Insert(ctx, run); err != nil {
return err
}
- payload, _ := v.Marshal()
- status := StatusWaiting
- if len(needs) > 0 || run.NeedApproval {
- status = StatusBlocked
- } else {
- hasWaiting = true
+
+ if run.Repo == nil {
+ repo, err := repo_model.GetRepositoryByID(ctx, run.RepoID)
+ if err != nil {
+ return err
+ }
+ run.Repo = repo
}
- job.Name = util.EllipsisDisplayString(job.Name, 255)
- runJobs = append(runJobs, &ActionRunJob{
- RunID: run.ID,
- RepoID: run.RepoID,
- OwnerID: run.OwnerID,
- CommitSHA: run.CommitSHA,
- IsForkPullRequest: run.IsForkPullRequest,
- Name: job.Name,
- WorkflowPayload: payload,
- JobID: id,
- Needs: needs,
- RunsOn: job.RunsOn(),
- Status: status,
- })
- }
- if err := db.Insert(ctx, runJobs); err != nil {
- return err
- }
- // if there is a job in the waiting status, increase tasks version.
- if hasWaiting {
- if err := IncreaseTaskVersion(ctx, run.OwnerID, run.RepoID); err != nil {
+ if err := updateRepoRunsNumbers(ctx, run.Repo); err != nil {
return err
}
- }
- return committer.Commit()
+ runJobs := make([]*ActionRunJob, 0, len(jobs))
+ var hasWaiting bool
+ for _, v := range jobs {
+ id, job := v.Job()
+ needs := job.Needs()
+ if err := v.SetJob(id, job.EraseNeeds()); err != nil {
+ return err
+ }
+ payload, _ := v.Marshal()
+ status := StatusWaiting
+ if len(needs) > 0 || run.NeedApproval {
+ status = StatusBlocked
+ } else {
+ hasWaiting = true
+ }
+ job.Name = util.EllipsisDisplayString(job.Name, 255)
+ runJobs = append(runJobs, &ActionRunJob{
+ RunID: run.ID,
+ RepoID: run.RepoID,
+ OwnerID: run.OwnerID,
+ CommitSHA: run.CommitSHA,
+ IsForkPullRequest: run.IsForkPullRequest,
+ Name: job.Name,
+ WorkflowPayload: payload,
+ JobID: id,
+ Needs: needs,
+ RunsOn: job.RunsOn(),
+ Status: status,
+ })
+ }
+ if err := db.Insert(ctx, runJobs); err != nil {
+ return err
+ }
+
+ // if there is a job in the waiting status, increase tasks version.
+ if hasWaiting {
+ if err := IncreaseTaskVersion(ctx, run.OwnerID, run.RepoID); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
}
-func GetRunByID(ctx context.Context, id int64) (*ActionRun, error) {
+func GetRunByRepoAndID(ctx context.Context, repoID, runID int64) (*ActionRun, error) {
var run ActionRun
- has, err := db.GetEngine(ctx).Where("id=?", id).Get(&run)
+ has, err := db.GetEngine(ctx).Where("id=? AND repo_id=?", runID, repoID).Get(&run)
if err != nil {
return nil, err
} else if !has {
- return nil, fmt.Errorf("run with id %d: %w", id, util.ErrNotExist)
+ return nil, fmt.Errorf("run with id %d: %w", runID, util.ErrNotExist)
}
return &run, nil
@@ -412,23 +421,16 @@ func UpdateRun(ctx context.Context, run *ActionRun, cols ...string) error {
return err
}
if affected == 0 {
- return fmt.Errorf("run has changed")
+ return errors.New("run has changed")
// It's impossible that the run is not found, since Gitea never deletes runs.
}
if run.Status != 0 || slices.Contains(cols, "status") {
if run.RepoID == 0 {
- run, err = GetRunByID(ctx, run.ID)
- if err != nil {
- return err
- }
+ setting.PanicInDevOrTesting("RepoID should not be 0")
}
- if run.Repo == nil {
- repo, err := repo_model.GetRepositoryByID(ctx, run.RepoID)
- if err != nil {
- return err
- }
- run.Repo = repo
+ if err = run.LoadRepo(ctx); err != nil {
+ return err
}
if err := updateRepoRunsNumbers(ctx, run.Repo); err != nil {
return err
diff --git a/models/actions/run_job.go b/models/actions/run_job.go
index de4b6aab66..e7fa21270c 100644
--- a/models/actions/run_job.go
+++ b/models/actions/run_job.go
@@ -10,6 +10,7 @@ import (
"time"
"code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
@@ -19,11 +20,12 @@ import (
// ActionRunJob represents a job of a run
type ActionRunJob struct {
ID int64
- RunID int64 `xorm:"index"`
- Run *ActionRun `xorm:"-"`
- RepoID int64 `xorm:"index"`
- OwnerID int64 `xorm:"index"`
- CommitSHA string `xorm:"index"`
+ RunID int64 `xorm:"index"`
+ Run *ActionRun `xorm:"-"`
+ RepoID int64 `xorm:"index"`
+ Repo *repo_model.Repository `xorm:"-"`
+ OwnerID int64 `xorm:"index"`
+ CommitSHA string `xorm:"index"`
IsForkPullRequest bool
Name string `xorm:"VARCHAR(255)"`
Attempt int64
@@ -49,7 +51,7 @@ func (job *ActionRunJob) Duration() time.Duration {
func (job *ActionRunJob) LoadRun(ctx context.Context) error {
if job.Run == nil {
- run, err := GetRunByID(ctx, job.RunID)
+ run, err := GetRunByRepoAndID(ctx, job.RepoID, job.RunID)
if err != nil {
return err
}
@@ -58,6 +60,17 @@ func (job *ActionRunJob) LoadRun(ctx context.Context) error {
return nil
}
+func (job *ActionRunJob) LoadRepo(ctx context.Context) error {
+ if job.Repo == nil {
+ repo, err := repo_model.GetRepositoryByID(ctx, job.RepoID)
+ if err != nil {
+ return err
+ }
+ job.Repo = repo
+ }
+ return nil
+}
+
// LoadAttributes load Run if not loaded
func (job *ActionRunJob) LoadAttributes(ctx context.Context) error {
if job == nil {
@@ -83,7 +96,7 @@ func GetRunJobByID(ctx context.Context, id int64) (*ActionRunJob, error) {
return &job, nil
}
-func GetRunJobsByRunID(ctx context.Context, runID int64) ([]*ActionRunJob, error) {
+func GetRunJobsByRunID(ctx context.Context, runID int64) (ActionJobList, error) {
var jobs []*ActionRunJob
if err := db.GetEngine(ctx).Where("run_id=?", runID).OrderBy("id").Find(&jobs); err != nil {
return nil, err
@@ -129,7 +142,7 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col
{
// Other goroutines may aggregate the status of the run and update it too.
// So we need load the run and its jobs before updating the run.
- run, err := GetRunByID(ctx, job.RunID)
+ run, err := GetRunByRepoAndID(ctx, job.RepoID, job.RunID)
if err != nil {
return 0, err
}
@@ -172,12 +185,12 @@ func AggregateJobStatus(jobs []*ActionRunJob) Status {
return StatusSuccess
case hasCancelled:
return StatusCancelled
- case hasFailure:
- return StatusFailure
case hasRunning:
return StatusRunning
case hasWaiting:
return StatusWaiting
+ case hasFailure:
+ return StatusFailure
case hasBlocked:
return StatusBlocked
default:
diff --git a/models/actions/run_job_list.go b/models/actions/run_job_list.go
index 6c5d3b3252..5f7bb62878 100644
--- a/models/actions/run_job_list.go
+++ b/models/actions/run_job_list.go
@@ -7,6 +7,7 @@ import (
"context"
"code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/timeutil"
@@ -21,7 +22,33 @@ func (jobs ActionJobList) GetRunIDs() []int64 {
})
}
+func (jobs ActionJobList) LoadRepos(ctx context.Context) error {
+ repoIDs := container.FilterSlice(jobs, func(j *ActionRunJob) (int64, bool) {
+ return j.RepoID, j.RepoID != 0 && j.Repo == nil
+ })
+ if len(repoIDs) == 0 {
+ return nil
+ }
+
+ repos := make(map[int64]*repo_model.Repository, len(repoIDs))
+ if err := db.GetEngine(ctx).In("id", repoIDs).Find(&repos); err != nil {
+ return err
+ }
+ for _, j := range jobs {
+ if j.RepoID > 0 && j.Repo == nil {
+ j.Repo = repos[j.RepoID]
+ }
+ }
+ return nil
+}
+
func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error {
+ if withRepo {
+ if err := jobs.LoadRepos(ctx); err != nil {
+ return err
+ }
+ }
+
runIDs := jobs.GetRunIDs()
runs := make(map[int64]*ActionRun, len(runIDs))
if err := db.GetEngine(ctx).In("id", runIDs).Find(&runs); err != nil {
@@ -30,15 +57,9 @@ func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error {
for _, j := range jobs {
if j.RunID > 0 && j.Run == nil {
j.Run = runs[j.RunID]
+ j.Run.Repo = j.Repo
}
}
- if withRepo {
- var runsList RunList = make([]*ActionRun, 0, len(runs))
- for _, r := range runs {
- runsList = append(runsList, r)
- }
- return runsList.LoadRepos(ctx)
- }
return nil
}
@@ -59,22 +80,31 @@ type FindRunJobOptions struct {
func (opts FindRunJobOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if opts.RunID > 0 {
- cond = cond.And(builder.Eq{"run_id": opts.RunID})
+ cond = cond.And(builder.Eq{"`action_run_job`.run_id": opts.RunID})
}
if opts.RepoID > 0 {
- cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
- }
- if opts.OwnerID > 0 {
- cond = cond.And(builder.Eq{"owner_id": opts.OwnerID})
+ cond = cond.And(builder.Eq{"`action_run_job`.repo_id": opts.RepoID})
}
if opts.CommitSHA != "" {
- cond = cond.And(builder.Eq{"commit_sha": opts.CommitSHA})
+ cond = cond.And(builder.Eq{"`action_run_job`.commit_sha": opts.CommitSHA})
}
if len(opts.Statuses) > 0 {
- cond = cond.And(builder.In("status", opts.Statuses))
+ cond = cond.And(builder.In("`action_run_job`.status", opts.Statuses))
}
if opts.UpdatedBefore > 0 {
- cond = cond.And(builder.Lt{"updated": opts.UpdatedBefore})
+ cond = cond.And(builder.Lt{"`action_run_job`.updated": opts.UpdatedBefore})
}
return cond
}
+
+func (opts FindRunJobOptions) ToJoins() []db.JoinFunc {
+ if opts.OwnerID > 0 {
+ return []db.JoinFunc{
+ func(sess db.Engine) error {
+ sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID)
+ return nil
+ },
+ }
+ }
+ return nil
+}
diff --git a/models/actions/run_job_status_test.go b/models/actions/run_job_status_test.go
index 523d38327e..b9ae9f34bf 100644
--- a/models/actions/run_job_status_test.go
+++ b/models/actions/run_job_status_test.go
@@ -58,14 +58,14 @@ func TestAggregateJobStatus(t *testing.T) {
{[]Status{StatusCancelled, StatusRunning}, StatusCancelled},
{[]Status{StatusCancelled, StatusBlocked}, StatusCancelled},
- // failure with other status, fail fast
- // Should "running" win? Maybe no: old code does make "running" win, but GitHub does fail fast.
+ // failure with other status, usually fail fast, but "running" wins to match GitHub's behavior
+ // another reason that we can't make "failure" wins over "running": it would cause a weird behavior that user cannot cancel a workflow or get current running workflows correctly by filter after a job fail.
{[]Status{StatusFailure}, StatusFailure},
{[]Status{StatusFailure, StatusSuccess}, StatusFailure},
{[]Status{StatusFailure, StatusSkipped}, StatusFailure},
{[]Status{StatusFailure, StatusCancelled}, StatusCancelled},
- {[]Status{StatusFailure, StatusWaiting}, StatusFailure},
- {[]Status{StatusFailure, StatusRunning}, StatusFailure},
+ {[]Status{StatusFailure, StatusWaiting}, StatusWaiting},
+ {[]Status{StatusFailure, StatusRunning}, StatusRunning},
{[]Status{StatusFailure, StatusBlocked}, StatusFailure},
// skipped with other status
diff --git a/models/actions/run_list.go b/models/actions/run_list.go
index b9b9324e07..12c55e538e 100644
--- a/models/actions/run_list.go
+++ b/models/actions/run_list.go
@@ -72,39 +72,50 @@ type FindRunOptions struct {
TriggerEvent webhook_module.HookEventType
Approved bool // not util.OptionalBool, it works only when it's true
Status []Status
+ CommitSHA string
}
func (opts FindRunOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if opts.RepoID > 0 {
- cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
- }
- if opts.OwnerID > 0 {
- cond = cond.And(builder.Eq{"owner_id": opts.OwnerID})
+ cond = cond.And(builder.Eq{"`action_run`.repo_id": opts.RepoID})
}
if opts.WorkflowID != "" {
- cond = cond.And(builder.Eq{"workflow_id": opts.WorkflowID})
+ cond = cond.And(builder.Eq{"`action_run`.workflow_id": opts.WorkflowID})
}
if opts.TriggerUserID > 0 {
- cond = cond.And(builder.Eq{"trigger_user_id": opts.TriggerUserID})
+ cond = cond.And(builder.Eq{"`action_run`.trigger_user_id": opts.TriggerUserID})
}
if opts.Approved {
- cond = cond.And(builder.Gt{"approved_by": 0})
+ cond = cond.And(builder.Gt{"`action_run`.approved_by": 0})
}
if len(opts.Status) > 0 {
- cond = cond.And(builder.In("status", opts.Status))
+ cond = cond.And(builder.In("`action_run`.status", opts.Status))
}
if opts.Ref != "" {
- cond = cond.And(builder.Eq{"ref": opts.Ref})
+ cond = cond.And(builder.Eq{"`action_run`.ref": opts.Ref})
}
if opts.TriggerEvent != "" {
- cond = cond.And(builder.Eq{"trigger_event": opts.TriggerEvent})
+ cond = cond.And(builder.Eq{"`action_run`.trigger_event": opts.TriggerEvent})
+ }
+ if opts.CommitSHA != "" {
+ cond = cond.And(builder.Eq{"`action_run`.commit_sha": opts.CommitSHA})
}
return cond
}
+func (opts FindRunOptions) ToJoins() []db.JoinFunc {
+ if opts.OwnerID > 0 {
+ return []db.JoinFunc{func(sess db.Engine) error {
+ sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID)
+ return nil
+ }}
+ }
+ return nil
+}
+
func (opts FindRunOptions) ToOrders() string {
- return "`id` DESC"
+ return "`action_run`.`id` DESC"
}
type StatusInfo struct {
diff --git a/models/actions/runner.go b/models/actions/runner.go
index 97db0ca7ea..81d4249ae0 100644
--- a/models/actions/runner.go
+++ b/models/actions/runner.go
@@ -5,6 +5,7 @@ package actions
import (
"context"
+ "errors"
"fmt"
"strings"
"time"
@@ -14,6 +15,7 @@ import (
"code.gitea.io/gitea/models/shared/types"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
@@ -57,6 +59,8 @@ type ActionRunner struct {
// Store labels defined in state file (default: .runner file) of `act_runner`
AgentLabels []string `xorm:"TEXT"`
+ // Store if this is a runner that only ever get one single job assigned
+ Ephemeral bool `xorm:"ephemeral NOT NULL DEFAULT false"`
Created timeutil.TimeStamp `xorm:"created"`
Updated timeutil.TimeStamp `xorm:"updated"`
@@ -84,9 +88,10 @@ func (r *ActionRunner) BelongsToOwnerType() types.OwnerType {
return types.OwnerTypeRepository
}
if r.OwnerID != 0 {
- if r.Owner.Type == user_model.UserTypeOrganization {
+ switch r.Owner.Type {
+ case user_model.UserTypeOrganization:
return types.OwnerTypeOrganization
- } else if r.Owner.Type == user_model.UserTypeIndividual {
+ case user_model.UserTypeIndividual:
return types.OwnerTypeIndividual
}
}
@@ -120,8 +125,15 @@ func (r *ActionRunner) IsOnline() bool {
return false
}
-// Editable checks if the runner is editable by the user
-func (r *ActionRunner) Editable(ownerID, repoID int64) bool {
+// EditableInContext checks if the runner is editable by the "context" owner/repo
+// ownerID == 0 and repoID == 0 means "admin" context, any runner including global runners could be edited
+// ownerID == 0 and repoID != 0 means "repo" context, any runner belonging to the given repo could be edited
+// ownerID != 0 and repoID == 0 means "owner(org/user)" context, any runner belonging to the given user/org could be edited
+// ownerID != 0 and repoID != 0 means "owner" OR "repo" context, legacy behavior, but we should forbid using it
+func (r *ActionRunner) EditableInContext(ownerID, repoID int64) bool {
+ if ownerID != 0 && repoID != 0 {
+ setting.PanicInDevOrTesting("ownerID and repoID should not be both set")
+ }
if ownerID == 0 && repoID == 0 {
return true
}
@@ -165,6 +177,12 @@ func init() {
db.RegisterModel(&ActionRunner{})
}
+// FindRunnerOptions
+// ownerID == 0 and repoID == 0 means any runner including global runners
+// repoID != 0 and WithAvailable == false means any runner for the given repo
+// repoID != 0 and WithAvailable == true means any runner for the given repo, parent user/org, and global runners
+// ownerID != 0 and repoID == 0 and WithAvailable == false means any runner for the given user/org
+// ownerID != 0 and repoID == 0 and WithAvailable == true means any runner for the given user/org and global runners
type FindRunnerOptions struct {
db.ListOptions
IDs []int64
@@ -281,6 +299,23 @@ func DeleteRunner(ctx context.Context, id int64) error {
return err
}
+// DeleteEphemeralRunner deletes a ephemeral runner by given ID.
+func DeleteEphemeralRunner(ctx context.Context, id int64) error {
+ runner, err := GetRunnerByID(ctx, id)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ return nil
+ }
+ return err
+ }
+ if !runner.Ephemeral {
+ return nil
+ }
+
+ _, err = db.DeleteByID[ActionRunner](ctx, id)
+ return err
+}
+
// CreateRunner creates new runner.
func CreateRunner(ctx context.Context, t *ActionRunner) error {
if t.OwnerID != 0 && t.RepoID != 0 {
diff --git a/models/actions/runner_token_test.go b/models/actions/runner_token_test.go
index 159805e5f7..21614b7086 100644
--- a/models/actions/runner_token_test.go
+++ b/models/actions/runner_token_test.go
@@ -17,7 +17,7 @@ func TestGetLatestRunnerToken(t *testing.T) {
token := unittest.AssertExistsAndLoadBean(t, &ActionRunnerToken{ID: 3})
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
- assert.EqualValues(t, expectedToken, token)
+ assert.Equal(t, expectedToken, token)
}
func TestNewRunnerToken(t *testing.T) {
@@ -26,7 +26,7 @@ func TestNewRunnerToken(t *testing.T) {
assert.NoError(t, err)
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
- assert.EqualValues(t, expectedToken, token)
+ assert.Equal(t, expectedToken, token)
}
func TestUpdateRunnerToken(t *testing.T) {
@@ -36,5 +36,5 @@ func TestUpdateRunnerToken(t *testing.T) {
assert.NoError(t, UpdateRunnerToken(db.DefaultContext, token))
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
- assert.EqualValues(t, expectedToken, token)
+ assert.Equal(t, expectedToken, token)
}
diff --git a/models/actions/schedule.go b/models/actions/schedule.go
index 2edf483fe0..ffde5092e0 100644
--- a/models/actions/schedule.go
+++ b/models/actions/schedule.go
@@ -56,65 +56,54 @@ func CreateScheduleTask(ctx context.Context, rows []*ActionSchedule) error {
return nil
}
- // Begin transaction
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- // Loop through each schedule row
- for _, row := range rows {
- row.Title = util.EllipsisDisplayString(row.Title, 255)
- // Create new schedule row
- if err = db.Insert(ctx, row); err != nil {
- return err
- }
-
- // Loop through each schedule spec and create a new spec row
- now := time.Now()
-
- for _, spec := range row.Specs {
- specRow := &ActionScheduleSpec{
- RepoID: row.RepoID,
- ScheduleID: row.ID,
- Spec: spec,
- }
- // Parse the spec and check for errors
- schedule, err := specRow.Parse()
- if err != nil {
- continue // skip to the next spec if there's an error
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // Loop through each schedule row
+ for _, row := range rows {
+ row.Title = util.EllipsisDisplayString(row.Title, 255)
+ // Create new schedule row
+ if err := db.Insert(ctx, row); err != nil {
+ return err
}
- specRow.Next = timeutil.TimeStamp(schedule.Next(now).Unix())
-
- // Insert the new schedule spec row
- if err = db.Insert(ctx, specRow); err != nil {
- return err
+ // Loop through each schedule spec and create a new spec row
+ now := time.Now()
+
+ for _, spec := range row.Specs {
+ specRow := &ActionScheduleSpec{
+ RepoID: row.RepoID,
+ ScheduleID: row.ID,
+ Spec: spec,
+ }
+ // Parse the spec and check for errors
+ schedule, err := specRow.Parse()
+ if err != nil {
+ continue // skip to the next spec if there's an error
+ }
+
+ specRow.Next = timeutil.TimeStamp(schedule.Next(now).Unix())
+
+ // Insert the new schedule spec row
+ if err = db.Insert(ctx, specRow); err != nil {
+ return err
+ }
}
}
- }
-
- // Commit transaction
- return committer.Commit()
+ return nil
+ })
}
func DeleteScheduleTaskByRepo(ctx context.Context, id int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if _, err := db.GetEngine(ctx).Delete(&ActionSchedule{RepoID: id}); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if _, err := db.GetEngine(ctx).Delete(&ActionSchedule{RepoID: id}); err != nil {
+ return err
+ }
- if _, err := db.GetEngine(ctx).Delete(&ActionScheduleSpec{RepoID: id}); err != nil {
- return err
- }
+ if _, err := db.GetEngine(ctx).Delete(&ActionScheduleSpec{RepoID: id}); err != nil {
+ return err
+ }
- return committer.Commit()
+ return nil
+ })
}
func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) ([]*ActionRunJob, error) {
diff --git a/models/actions/status.go b/models/actions/status.go
index eda2234137..2b1d70613c 100644
--- a/models/actions/status.go
+++ b/models/actions/status.go
@@ -4,6 +4,8 @@
package actions
import (
+ "slices"
+
"code.gitea.io/gitea/modules/translation"
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
@@ -88,12 +90,7 @@ func (s Status) IsBlocked() bool {
// In returns whether s is one of the given statuses
func (s Status) In(statuses ...Status) bool {
- for _, v := range statuses {
- if s == v {
- return true
- }
- }
- return false
+ return slices.Contains(statuses, s)
}
func (s Status) AsResult() runnerv1.Result {
diff --git a/models/actions/task.go b/models/actions/task.go
index 9f13ff94c9..c1306a8418 100644
--- a/models/actions/task.go
+++ b/models/actions/task.go
@@ -6,6 +6,7 @@ package actions
import (
"context"
"crypto/subtle"
+ "errors"
"fmt"
"time"
@@ -277,14 +278,13 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask
return nil, false, err
}
- var workflowJob *jobparser.Job
- if gots, err := jobparser.Parse(job.WorkflowPayload); err != nil {
+ parsedWorkflows, err := jobparser.Parse(job.WorkflowPayload)
+ if err != nil {
return nil, false, fmt.Errorf("parse workflow of job %d: %w", job.ID, err)
- } else if len(gots) != 1 {
+ } else if len(parsedWorkflows) != 1 {
return nil, false, fmt.Errorf("workflow of job %d: not single workflow", job.ID)
- } else { //nolint:revive
- _, workflowJob = gots[0].Job()
}
+ _, workflowJob := parsedWorkflows[0].Job()
if _, err := e.Insert(task); err != nil {
return nil, false, err
@@ -335,6 +335,11 @@ func UpdateTask(ctx context.Context, task *ActionTask, cols ...string) error {
sess.Cols(cols...)
}
_, err := sess.Update(task)
+
+ // Automatically delete the ephemeral runner if the task is done
+ if err == nil && task.Status.IsDone() && util.SliceContainsString(cols, "status") {
+ return DeleteEphemeralRunner(ctx, task.RunnerID)
+ }
return err
}
@@ -347,78 +352,70 @@ func UpdateTaskByState(ctx context.Context, runnerID int64, state *runnerv1.Task
stepStates[v.Id] = v
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- e := db.GetEngine(ctx)
-
- task := &ActionTask{}
- if has, err := e.ID(state.Id).Get(task); err != nil {
- return nil, err
- } else if !has {
- return nil, util.ErrNotExist
- } else if runnerID != task.RunnerID {
- return nil, fmt.Errorf("invalid runner for task")
- }
-
- if task.Status.IsDone() {
- // the state is final, do nothing
- return task, nil
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*ActionTask, error) {
+ e := db.GetEngine(ctx)
- // state.Result is not unspecified means the task is finished
- if state.Result != runnerv1.Result_RESULT_UNSPECIFIED {
- task.Status = Status(state.Result)
- task.Stopped = timeutil.TimeStamp(state.StoppedAt.AsTime().Unix())
- if err := UpdateTask(ctx, task, "status", "stopped"); err != nil {
- return nil, err
- }
- if _, err := UpdateRunJob(ctx, &ActionRunJob{
- ID: task.JobID,
- Status: task.Status,
- Stopped: task.Stopped,
- }, nil); err != nil {
+ task := &ActionTask{}
+ if has, err := e.ID(state.Id).Get(task); err != nil {
return nil, err
+ } else if !has {
+ return nil, util.ErrNotExist
+ } else if runnerID != task.RunnerID {
+ return nil, errors.New("invalid runner for task")
}
- } else {
- // Force update ActionTask.Updated to avoid the task being judged as a zombie task
- task.Updated = timeutil.TimeStampNow()
- if err := UpdateTask(ctx, task, "updated"); err != nil {
- return nil, err
- }
- }
- if err := task.LoadAttributes(ctx); err != nil {
- return nil, err
- }
-
- for _, step := range task.Steps {
- var result runnerv1.Result
- if v, ok := stepStates[step.Index]; ok {
- result = v.Result
- step.LogIndex = v.LogIndex
- step.LogLength = v.LogLength
- step.Started = convertTimestamp(v.StartedAt)
- step.Stopped = convertTimestamp(v.StoppedAt)
+ if task.Status.IsDone() {
+ // the state is final, do nothing
+ return task, nil
}
- if result != runnerv1.Result_RESULT_UNSPECIFIED {
- step.Status = Status(result)
- } else if step.Started != 0 {
- step.Status = StatusRunning
+
+ // state.Result is not unspecified means the task is finished
+ if state.Result != runnerv1.Result_RESULT_UNSPECIFIED {
+ task.Status = Status(state.Result)
+ task.Stopped = timeutil.TimeStamp(state.StoppedAt.AsTime().Unix())
+ if err := UpdateTask(ctx, task, "status", "stopped"); err != nil {
+ return nil, err
+ }
+ if _, err := UpdateRunJob(ctx, &ActionRunJob{
+ ID: task.JobID,
+ Status: task.Status,
+ Stopped: task.Stopped,
+ }, nil); err != nil {
+ return nil, err
+ }
+ } else {
+ // Force update ActionTask.Updated to avoid the task being judged as a zombie task
+ task.Updated = timeutil.TimeStampNow()
+ if err := UpdateTask(ctx, task, "updated"); err != nil {
+ return nil, err
+ }
}
- if _, err := e.ID(step.ID).Update(step); err != nil {
+
+ if err := task.LoadAttributes(ctx); err != nil {
return nil, err
}
- }
- if err := committer.Commit(); err != nil {
- return nil, err
- }
+ for _, step := range task.Steps {
+ var result runnerv1.Result
+ if v, ok := stepStates[step.Index]; ok {
+ result = v.Result
+ step.LogIndex = v.LogIndex
+ step.LogLength = v.LogLength
+ step.Started = convertTimestamp(v.StartedAt)
+ step.Stopped = convertTimestamp(v.StoppedAt)
+ }
+ if result != runnerv1.Result_RESULT_UNSPECIFIED {
+ step.Status = Status(result)
+ } else if step.Started != 0 {
+ step.Status = StatusRunning
+ }
+ if _, err := e.ID(step.ID).Update(step); err != nil {
+ return nil, err
+ }
+ }
- return task, nil
+ return task, nil
+ })
}
func StopTask(ctx context.Context, taskID int64, status Status) error {
diff --git a/models/actions/task_list.go b/models/actions/task_list.go
index df4b43c5ef..0c80397899 100644
--- a/models/actions/task_list.go
+++ b/models/actions/task_list.go
@@ -48,6 +48,7 @@ func (tasks TaskList) LoadAttributes(ctx context.Context) error {
type FindTaskOptions struct {
db.ListOptions
RepoID int64
+ JobID int64
OwnerID int64
CommitSHA string
Status Status
@@ -61,6 +62,9 @@ func (opts FindTaskOptions) ToConds() builder.Cond {
if opts.RepoID > 0 {
cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
}
+ if opts.JobID > 0 {
+ cond = cond.And(builder.Eq{"job_id": opts.JobID})
+ }
if opts.OwnerID > 0 {
cond = cond.And(builder.Eq{"owner_id": opts.OwnerID})
}
diff --git a/models/actions/tasks_version.go b/models/actions/tasks_version.go
index 96c5468c1a..b686ce2443 100644
--- a/models/actions/tasks_version.go
+++ b/models/actions/tasks_version.go
@@ -73,33 +73,29 @@ func increaseTasksVersionByScope(ctx context.Context, ownerID, repoID int64) err
}
func IncreaseTaskVersion(ctx context.Context, ownerID, repoID int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- // 1. increase global
- if err := increaseTasksVersionByScope(ctx, 0, 0); err != nil {
- log.Error("IncreaseTasksVersionByScope(Global): %v", err)
- return err
- }
-
- // 2. increase owner
- if ownerID > 0 {
- if err := increaseTasksVersionByScope(ctx, ownerID, 0); err != nil {
- log.Error("IncreaseTasksVersionByScope(Owner): %v", err)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // 1. increase global
+ if err := increaseTasksVersionByScope(ctx, 0, 0); err != nil {
+ log.Error("IncreaseTasksVersionByScope(Global): %v", err)
return err
}
- }
- // 3. increase repo
- if repoID > 0 {
- if err := increaseTasksVersionByScope(ctx, 0, repoID); err != nil {
- log.Error("IncreaseTasksVersionByScope(Repo): %v", err)
- return err
+ // 2. increase owner
+ if ownerID > 0 {
+ if err := increaseTasksVersionByScope(ctx, ownerID, 0); err != nil {
+ log.Error("IncreaseTasksVersionByScope(Owner): %v", err)
+ return err
+ }
+ }
+
+ // 3. increase repo
+ if repoID > 0 {
+ if err := increaseTasksVersionByScope(ctx, 0, repoID); err != nil {
+ log.Error("IncreaseTasksVersionByScope(Repo): %v", err)
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
diff --git a/models/actions/utils.go b/models/actions/utils.go
index 12657942fc..f6ba661ae3 100644
--- a/models/actions/utils.go
+++ b/models/actions/utils.go
@@ -82,3 +82,22 @@ func calculateDuration(started, stopped timeutil.TimeStamp, status Status) time.
}
return timeSince(s).Truncate(time.Second)
}
+
+// best effort function to convert an action schedule to action run, to be used in GenerateGiteaContext
+func (s *ActionSchedule) ToActionRun() *ActionRun {
+ return &ActionRun{
+ Title: s.Title,
+ RepoID: s.RepoID,
+ Repo: s.Repo,
+ OwnerID: s.OwnerID,
+ WorkflowID: s.WorkflowID,
+ TriggerUserID: s.TriggerUserID,
+ TriggerUser: s.TriggerUser,
+ Ref: s.Ref,
+ CommitSHA: s.CommitSHA,
+ Event: s.Event,
+ EventPayload: s.EventPayload,
+ Created: s.Created,
+ Updated: s.Updated,
+ }
+}
diff --git a/models/actions/variable.go b/models/actions/variable.go
index 1929cffbd8..7154843c17 100644
--- a/models/actions/variable.go
+++ b/models/actions/variable.go
@@ -6,10 +6,12 @@ package actions
import (
"context"
"strings"
+ "unicode/utf8"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
"xorm.io/builder"
)
@@ -32,26 +34,39 @@ type ActionVariable struct {
RepoID int64 `xorm:"INDEX UNIQUE(owner_repo_name)"`
Name string `xorm:"UNIQUE(owner_repo_name) NOT NULL"`
Data string `xorm:"LONGTEXT NOT NULL"`
+ Description string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
+const (
+ VariableDataMaxLength = 65536
+ VariableDescriptionMaxLength = 4096
+)
+
func init() {
db.RegisterModel(new(ActionVariable))
}
-func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data string) (*ActionVariable, error) {
+func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data, description string) (*ActionVariable, error) {
if ownerID != 0 && repoID != 0 {
// It's trying to create a variable that belongs to a repository, but OwnerID has been set accidentally.
// Remove OwnerID to avoid confusion; it's not worth returning an error here.
ownerID = 0
}
+ if utf8.RuneCountInString(data) > VariableDataMaxLength {
+ return nil, util.NewInvalidArgumentErrorf("data too long")
+ }
+
+ description = util.TruncateRunes(description, VariableDescriptionMaxLength)
+
variable := &ActionVariable{
- OwnerID: ownerID,
- RepoID: repoID,
- Name: strings.ToUpper(name),
- Data: data,
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: strings.ToUpper(name),
+ Data: data,
+ Description: description,
}
return variable, db.Insert(ctx, variable)
}
@@ -96,6 +111,12 @@ func FindVariables(ctx context.Context, opts FindVariablesOpts) ([]*ActionVariab
}
func UpdateVariableCols(ctx context.Context, variable *ActionVariable, cols ...string) (bool, error) {
+ if utf8.RuneCountInString(variable.Data) > VariableDataMaxLength {
+ return false, util.NewInvalidArgumentErrorf("data too long")
+ }
+
+ variable.Description = util.TruncateRunes(variable.Description, VariableDescriptionMaxLength)
+
variable.Name = strings.ToUpper(variable.Name)
count, err := db.GetEngine(ctx).
ID(variable.ID).
diff --git a/models/activities/action.go b/models/activities/action.go
index c16c49c0ac..1a0dfe6412 100644
--- a/models/activities/action.go
+++ b/models/activities/action.go
@@ -9,6 +9,7 @@ import (
"fmt"
"net/url"
"path"
+ "slices"
"strconv"
"strings"
"time"
@@ -125,12 +126,7 @@ func (at ActionType) String() string {
}
func (at ActionType) InActions(actions ...string) bool {
- for _, action := range actions {
- if action == at.String() {
- return true
- }
- }
- return false
+ return slices.Contains(actions, at.String())
}
// Action represents user operation type and other information to
@@ -172,7 +168,10 @@ func (a *Action) TableIndices() []*schemas.Index {
cuIndex := schemas.NewIndex("c_u", schemas.IndexType)
cuIndex.AddColumn("user_id", "is_deleted")
- indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex}
+ actUserUserIndex := schemas.NewIndex("au_c_u", schemas.IndexType)
+ actUserUserIndex.AddColumn("act_user_id", "created_unix", "user_id")
+
+ indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex, actUserUserIndex}
return indices
}
@@ -188,7 +187,7 @@ func (a *Action) LoadActUser(ctx context.Context) {
return
}
var err error
- a.ActUser, err = user_model.GetUserByID(ctx, a.ActUserID)
+ a.ActUser, err = user_model.GetPossibleUserByID(ctx, a.ActUserID)
if err == nil {
return
} else if user_model.IsErrUserNotExist(err) {
@@ -442,6 +441,7 @@ type GetFeedsOptions struct {
OnlyPerformedBy bool // only actions performed by requested user
IncludeDeleted bool // include deleted actions
Date string // the day we want activity for: YYYY-MM-DD
+ DontCount bool // don't count the total number of results in GetFeeds
}
// ActivityReadable return whether doer can read activities of user
@@ -526,7 +526,7 @@ func ActivityQueryCondition(ctx context.Context, opts GetFeedsOptions) (builder.
if opts.RequestedTeam != nil {
env := repo_model.AccessibleTeamReposEnv(organization.OrgFromUser(opts.RequestedUser), opts.RequestedTeam)
- teamRepoIDs, err := env.RepoIDs(ctx, 1, opts.RequestedUser.NumRepos)
+ teamRepoIDs, err := env.RepoIDs(ctx)
if err != nil {
return nil, fmt.Errorf("GetTeamRepositories: %w", err)
}
diff --git a/models/activities/action_list.go b/models/activities/action_list.go
index f7ea48f03e..b52cf7ee49 100644
--- a/models/activities/action_list.go
+++ b/models/activities/action_list.go
@@ -5,6 +5,7 @@ package activities
import (
"context"
+ "errors"
"fmt"
"strconv"
@@ -205,7 +206,7 @@ func (actions ActionList) LoadIssues(ctx context.Context) error {
// GetFeeds returns actions according to the provided options
func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, error) {
if opts.RequestedUser == nil && opts.RequestedTeam == nil && opts.RequestedRepo == nil {
- return nil, 0, fmt.Errorf("need at least one of these filters: RequestedUser, RequestedTeam, RequestedRepo")
+ return nil, 0, errors.New("need at least one of these filters: RequestedUser, RequestedTeam, RequestedRepo")
}
var err error
@@ -243,7 +244,11 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err
sess := db.GetEngine(ctx).Where(cond)
sess = db.SetSessionPagination(sess, &opts)
- count, err = sess.Desc("`action`.created_unix").FindAndCount(&actions)
+ if opts.DontCount {
+ err = sess.Desc("`action`.created_unix").Find(&actions)
+ } else {
+ count, err = sess.Desc("`action`.created_unix").FindAndCount(&actions)
+ }
if err != nil {
return nil, 0, fmt.Errorf("FindAndCount: %w", err)
}
@@ -257,11 +262,13 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err
return nil, 0, fmt.Errorf("Find(actionsIDs): %w", err)
}
- count, err = db.GetEngine(ctx).Where(cond).
- Table("action").
- Cols("`action`.id").Count()
- if err != nil {
- return nil, 0, fmt.Errorf("Count: %w", err)
+ if !opts.DontCount {
+ count, err = db.GetEngine(ctx).Where(cond).
+ Table("action").
+ Cols("`action`.id").Count()
+ if err != nil {
+ return nil, 0, fmt.Errorf("Count: %w", err)
+ }
}
if err := db.GetEngine(ctx).In("`action`.id", actionIDs).Desc("`action`.created_unix").Find(&actions); err != nil {
@@ -275,3 +282,9 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err
return actions, count, nil
}
+
+func CountUserFeeds(ctx context.Context, userID int64) (int64, error) {
+ return db.GetEngine(ctx).Where("user_id = ?", userID).
+ And("is_deleted = ?", false).
+ Count(&Action{})
+}
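For context, a hedged sketch of how DontCount and CountUserFeeds fit together (not part of the patch; the caller and aliases are assumptions, and GetFeedsOptions fields other than the ones shown above are omitted):

package example

import (
	"context"

	activities_model "code.gitea.io/gitea/models/activities"
	user_model "code.gitea.io/gitea/models/user"
)

// listUserFeeds skips the expensive COUNT inside GetFeeds and, where a total is
// still wanted, gets it from the cheaper per-user CountUserFeeds query.
func listUserFeeds(ctx context.Context, doer *user_model.User) (activities_model.ActionList, int64, error) {
	feeds, _, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
		RequestedUser: doer,
		DontCount:     true,
	})
	if err != nil {
		return nil, 0, err
	}
	total, err := activities_model.CountUserFeeds(ctx, doer.ID)
	return feeds, total, err
}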
diff --git a/models/activities/action_test.go b/models/activities/action_test.go
index ee2a225a3e..ff311ac891 100644
--- a/models/activities/action_test.go
+++ b/models/activities/action_test.go
@@ -130,7 +130,7 @@ func TestDeleteIssueActions(t *testing.T) {
// load an issue
issue := unittest.AssertExistsAndLoadBean(t, &issue_model.Issue{ID: 4})
- assert.NotEqualValues(t, issue.ID, issue.Index) // it needs to use different ID/Index to test the DeleteIssueActions to delete some actions by IssueIndex
+ assert.NotEqual(t, issue.ID, issue.Index) // the issue must have different ID and Index values so that DeleteIssueActions is exercised deleting actions by IssueIndex
// insert a comment
err := db.Insert(db.DefaultContext, &issue_model.Comment{Type: issue_model.CommentTypeComment, IssueID: issue.ID})
diff --git a/models/activities/notification_list.go b/models/activities/notification_list.go
index 0cbb91df3c..6539e14ea2 100644
--- a/models/activities/notification_list.go
+++ b/models/activities/notification_list.go
@@ -70,17 +70,9 @@ func (opts FindNotificationOptions) ToOrders() string {
// for each watcher, or updates it if already exists
// receiverID > 0 just send to receiver, else send to all watcher
func CreateOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err := createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ return createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID)
+ })
}
func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
@@ -208,10 +200,7 @@ func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.Repository
repos := make(map[int64]*repo_model.Repository, len(repoIDs))
left := len(repoIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", repoIDs[:limit]).
Rows(new(repo_model.Repository))
@@ -282,10 +271,7 @@ func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) {
issues := make(map[int64]*issues_model.Issue, len(issueIDs))
left := len(issueIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", issueIDs[:limit]).
Rows(new(issues_model.Issue))
@@ -377,10 +363,7 @@ func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) {
users := make(map[int64]*user_model.User, len(userIDs))
left := len(userIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", userIDs[:limit]).
Rows(new(user_model.User))
@@ -428,10 +411,7 @@ func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) {
comments := make(map[int64]*issues_model.Comment, len(commentIDs))
left := len(commentIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", commentIDs[:limit]).
Rows(new(issues_model.Comment))
diff --git a/models/activities/notification_test.go b/models/activities/notification_test.go
index 52f0eacba1..5d2a29bc36 100644
--- a/models/activities/notification_test.go
+++ b/models/activities/notification_test.go
@@ -44,11 +44,11 @@ func TestNotificationsForUser(t *testing.T) {
assert.NoError(t, err)
if assert.Len(t, notfs, 3) {
assert.EqualValues(t, 5, notfs[0].ID)
- assert.EqualValues(t, user.ID, notfs[0].UserID)
+ assert.Equal(t, user.ID, notfs[0].UserID)
assert.EqualValues(t, 4, notfs[1].ID)
- assert.EqualValues(t, user.ID, notfs[1].UserID)
+ assert.Equal(t, user.ID, notfs[1].UserID)
assert.EqualValues(t, 2, notfs[2].ID)
- assert.EqualValues(t, user.ID, notfs[2].UserID)
+ assert.Equal(t, user.ID, notfs[2].UserID)
}
}
@@ -58,7 +58,7 @@ func TestNotification_GetRepo(t *testing.T) {
repo, err := notf.GetRepo(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, repo, notf.Repository)
- assert.EqualValues(t, notf.RepoID, repo.ID)
+ assert.Equal(t, notf.RepoID, repo.ID)
}
func TestNotification_GetIssue(t *testing.T) {
@@ -67,7 +67,7 @@ func TestNotification_GetIssue(t *testing.T) {
issue, err := notf.GetIssue(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, issue, notf.Issue)
- assert.EqualValues(t, notf.IssueID, issue.ID)
+ assert.Equal(t, notf.IssueID, issue.ID)
}
func TestGetNotificationCount(t *testing.T) {
@@ -136,5 +136,5 @@ func TestSetIssueReadBy(t *testing.T) {
nt, err := activities_model.GetIssueNotification(db.DefaultContext, user.ID, issue.ID)
assert.NoError(t, err)
- assert.EqualValues(t, activities_model.NotificationStatusRead, nt.Status)
+ assert.Equal(t, activities_model.NotificationStatusRead, nt.Status)
}
diff --git a/models/activities/repo_activity.go b/models/activities/repo_activity.go
index 3ccdbd47d3..aeaa452c9e 100644
--- a/models/activities/repo_activity.go
+++ b/models/activities/repo_activity.go
@@ -139,10 +139,7 @@ func GetActivityStatsTopAuthors(ctx context.Context, repo *repo_model.Repository
return v[i].Commits > v[j].Commits
})
- cnt := count
- if cnt > len(v) {
- cnt = len(v)
- }
+ cnt := min(count, len(v))
return v[:cnt], nil
}
diff --git a/models/activities/statistic.go b/models/activities/statistic.go
index ff81ad78a1..940651d359 100644
--- a/models/activities/statistic.go
+++ b/models/activities/statistic.go
@@ -17,13 +17,16 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
)
// Statistic contains the database statistics
type Statistic struct {
Counter struct {
- User, Org, PublicKey,
+ UsersActive, UsersNotActive,
+ Org, PublicKey,
Repo, Watch, Star, Access,
Issue, IssueClosed, IssueOpen,
Comment, Oauth, Follow,
@@ -53,8 +56,20 @@ type IssueByRepositoryCount struct {
// GetStatistic returns the database statistics
func GetStatistic(ctx context.Context) (stats Statistic) {
e := db.GetEngine(ctx)
- stats.Counter.User = user_model.CountUsers(ctx, nil)
- stats.Counter.Org, _ = db.Count[organization.Organization](ctx, organization.FindOrgOptions{IncludePrivate: true})
+
+ // Number of active users
+ usersActiveOpts := user_model.CountUserFilter{
+ IsActive: optional.Some(true),
+ }
+ stats.Counter.UsersActive = user_model.CountUsers(ctx, &usersActiveOpts)
+
+ // Number of inactive users
+ usersNotActiveOpts := user_model.CountUserFilter{
+ IsActive: optional.Some(false),
+ }
+ stats.Counter.UsersNotActive = user_model.CountUsers(ctx, &usersNotActiveOpts)
+
+ stats.Counter.Org, _ = db.Count[organization.Organization](ctx, organization.FindOrgOptions{IncludeVisibility: structs.VisibleTypePrivate})
stats.Counter.PublicKey, _ = e.Count(new(asymkey_model.PublicKey))
stats.Counter.Repo, _ = repo_model.CountRepositories(ctx, repo_model.CountRepositoryOptions{})
stats.Counter.Watch, _ = e.Count(new(repo_model.Watch))
diff --git a/models/activities/user_heatmap.go b/models/activities/user_heatmap.go
index 1f8f0f590e..ef67838be7 100644
--- a/models/activities/user_heatmap.go
+++ b/models/activities/user_heatmap.go
@@ -66,7 +66,7 @@ func getUserHeatmapData(ctx context.Context, user *user_model.User, team *organi
Select(groupBy+" AS timestamp, count(user_id) as contributions").
Table("action").
Where(cond).
- And("created_unix > ?", timeutil.TimeStampNow()-31536000).
+ And("created_unix > ?", timeutil.TimeStampNow()-(366+7)*86400). // (366+7) days to include the first week for the heatmap
GroupBy(groupByName).
OrderBy("timestamp").
Find(&hdata)
diff --git a/models/asymkey/error.go b/models/asymkey/error.go
index 1ed6edd71a..b765624579 100644
--- a/models/asymkey/error.go
+++ b/models/asymkey/error.go
@@ -132,7 +132,7 @@ func IsErrGPGKeyParsing(err error) bool {
}
func (err ErrGPGKeyParsing) Error() string {
- return fmt.Sprintf("failed to parse gpg key %s", err.ParseError.Error())
+ return "failed to parse gpg key " + err.ParseError.Error()
}
// ErrGPGKeyNotExist represents a "GPGKeyNotExist" kind of error.
diff --git a/models/asymkey/gpg_key.go b/models/asymkey/gpg_key.go
index 7f35a96a59..38de7cbda6 100644
--- a/models/asymkey/gpg_key.go
+++ b/models/asymkey/gpg_key.go
@@ -5,6 +5,7 @@ package asymkey
import (
"context"
+ "errors"
"fmt"
"strings"
"time"
@@ -207,7 +208,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
// deleteGPGKey does the actual key deletion
func deleteGPGKey(ctx context.Context, keyID string) (int64, error) {
if keyID == "" {
- return 0, fmt.Errorf("empty KeyId forbidden") // Should never happen but just to be sure
+ return 0, errors.New("empty KeyId forbidden") // Should never happen but just to be sure
}
// Delete imported key
n, err := db.GetEngine(ctx).Where("key_id=?", keyID).Delete(new(GPGKeyImport))
@@ -227,15 +228,15 @@ func DeleteGPGKey(ctx context.Context, doer *user_model.User, id int64) (err err
return fmt.Errorf("GetPublicKeyByID: %w", err)
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if _, err = deleteGPGKey(ctx, key.KeyID); err != nil {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ _, err = deleteGPGKey(ctx, key.KeyID)
return err
- }
+ })
+}
- return committer.Commit()
+func FindGPGKeyWithSubKeys(ctx context.Context, keyID string) ([]*GPGKey, error) {
+ return db.Find[GPGKey](ctx, FindGPGKeyOptions{
+ KeyID: keyID,
+ IncludeSubKeys: true,
+ })
}
diff --git a/models/asymkey/gpg_key_add.go b/models/asymkey/gpg_key_add.go
index ec2031088a..1c7d2c1da2 100644
--- a/models/asymkey/gpg_key_add.go
+++ b/models/asymkey/gpg_key_add.go
@@ -91,7 +91,7 @@ func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature str
signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature), nil)
}
if err != nil {
- log.Error("Unable to validate token signature. Error: %v", err)
+ log.Debug("AddGPGKey CheckArmoredDetachedSignature failed: %v", err)
return nil, ErrGPGInvalidTokenSignature{
ID: ekeys[0].PrimaryKey.KeyIdString(),
Wrapped: err,
diff --git a/models/asymkey/gpg_key_commit_verification.go b/models/asymkey/gpg_key_commit_verification.go
index 1591cd5068..b85374e073 100644
--- a/models/asymkey/gpg_key_commit_verification.go
+++ b/models/asymkey/gpg_key_commit_verification.go
@@ -4,6 +4,7 @@
package asymkey
import (
+ "errors"
"fmt"
"hash"
@@ -14,25 +15,6 @@ import (
"github.com/ProtonMail/go-crypto/openpgp/packet"
)
-// __________________ ________ ____ __.
-// / _____/\______ \/ _____/ | |/ _|____ ___.__.
-// / \ ___ | ___/ \ ___ | <_/ __ < | |
-// \ \_\ \| | \ \_\ \ | | \ ___/\___ |
-// \______ /|____| \______ / |____|__ \___ > ____|
-// \/ \/ \/ \/\/
-// _________ .__ __
-// \_ ___ \ ____ _____ _____ |__|/ |_
-// / \ \/ / _ \ / \ / \| \ __\
-// \ \___( <_> ) Y Y \ Y Y \ || |
-// \______ /\____/|__|_| /__|_| /__||__|
-// \/ \/ \/
-// ____ ____ .__ _____.__ __ .__
-// \ \ / /___________|__|/ ____\__| ____ _____ _/ |_|__| ____ ____
-// \ Y // __ \_ __ \ \ __\| |/ ___\\__ \\ __\ |/ _ \ / \
-// \ /\ ___/| | \/ || | | \ \___ / __ \| | | ( <_> ) | \
-// \___/ \___ >__| |__||__| |__|\___ >____ /__| |__|\____/|___| /
-// \/ \/ \/ \/
-
// This file provides functions relating commit verification
// CommitVerification represents a commit validation of signature
@@ -40,8 +22,8 @@ type CommitVerification struct {
Verified bool
Warning bool
Reason string
- SigningUser *user_model.User
- CommittingUser *user_model.User
+ SigningUser *user_model.User // if Verified, then SigningUser is non-nil
+ CommittingUser *user_model.User // if Verified, then CommittingUser is non-nil
SigningEmail string
SigningKey *GPGKey
SigningSSHKey *PublicKey
@@ -68,7 +50,7 @@ const (
func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error {
// Check if key can sign
if !k.CanSign {
- return fmt.Errorf("key can not sign")
+ return errors.New("key can not sign")
}
// Decode key
pkey, err := base64DecPubKey(k.Content)
diff --git a/models/asymkey/gpg_key_common.go b/models/asymkey/gpg_key_common.go
index 1291cbc542..76f52a3ca4 100644
--- a/models/asymkey/gpg_key_common.go
+++ b/models/asymkey/gpg_key_common.go
@@ -7,6 +7,7 @@ import (
"bytes"
"crypto"
"encoding/base64"
+ "errors"
"fmt"
"hash"
"io"
@@ -75,7 +76,7 @@ func base64DecPubKey(content string) (*packet.PublicKey, error) {
// Check type
pkey, ok := p.(*packet.PublicKey)
if !ok {
- return nil, fmt.Errorf("key is not a public key")
+ return nil, errors.New("key is not a public key")
}
return pkey, nil
}
@@ -122,15 +123,15 @@ func readArmoredSign(r io.Reader) (body io.Reader, err error) {
func ExtractSignature(s string) (*packet.Signature, error) {
r, err := readArmoredSign(strings.NewReader(s))
if err != nil {
- return nil, fmt.Errorf("Failed to read signature armor")
+ return nil, errors.New("Failed to read signature armor")
}
p, err := packet.Read(r)
if err != nil {
- return nil, fmt.Errorf("Failed to read signature packet")
+ return nil, errors.New("Failed to read signature packet")
}
sig, ok := p.(*packet.Signature)
if !ok {
- return nil, fmt.Errorf("Packet is not a signature")
+ return nil, errors.New("Packet is not a signature")
}
return sig, nil
}
diff --git a/models/asymkey/gpg_key_verify.go b/models/asymkey/gpg_key_verify.go
index 6eedb5b7ba..55c64973b4 100644
--- a/models/asymkey/gpg_key_verify.go
+++ b/models/asymkey/gpg_key_verify.go
@@ -14,97 +14,76 @@ import (
"code.gitea.io/gitea/modules/log"
)
-// __________________ ________ ____ __.
-// / _____/\______ \/ _____/ | |/ _|____ ___.__.
-// / \ ___ | ___/ \ ___ | <_/ __ < | |
-// \ \_\ \| | \ \_\ \ | | \ ___/\___ |
-// \______ /|____| \______ / |____|__ \___ > ____|
-// \/ \/ \/ \/\/
-// ____ ____ .__ _____
-// \ \ / /___________|__|/ ____\__.__.
-// \ Y // __ \_ __ \ \ __< | |
-// \ /\ ___/| | \/ || | \___ |
-// \___/ \___ >__| |__||__| / ____|
-// \/ \/
-
// This file provides functions relating verifying gpg keys
// VerifyGPGKey marks a GPG key as verified
func VerifyGPGKey(ctx context.Context, ownerID int64, keyID, token, signature string) (string, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return "", err
- }
- defer committer.Close()
-
- key := new(GPGKey)
-
- has, err := db.GetEngine(ctx).Where("owner_id = ? AND key_id = ?", ownerID, keyID).Get(key)
- if err != nil {
- return "", err
- } else if !has {
- return "", ErrGPGKeyNotExist{}
- }
-
- if err := key.LoadSubKeys(ctx); err != nil {
- return "", err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (string, error) {
+ key := new(GPGKey)
- sig, err := ExtractSignature(signature)
- if err != nil {
- return "", ErrGPGInvalidTokenSignature{
- ID: key.KeyID,
- Wrapped: err,
+ has, err := db.GetEngine(ctx).Where("owner_id = ? AND key_id = ?", ownerID, keyID).Get(key)
+ if err != nil {
+ return "", err
+ } else if !has {
+ return "", ErrGPGKeyNotExist{}
}
- }
- signer, err := hashAndVerifyWithSubKeys(sig, token, key)
- if err != nil {
- return "", ErrGPGInvalidTokenSignature{
- ID: key.KeyID,
- Wrapped: err,
+ if err := key.LoadSubKeys(ctx); err != nil {
+ return "", err
}
- }
- if signer == nil {
- signer, err = hashAndVerifyWithSubKeys(sig, token+"\n", key)
+
+ sig, err := ExtractSignature(signature)
if err != nil {
return "", ErrGPGInvalidTokenSignature{
ID: key.KeyID,
Wrapped: err,
}
}
- }
- if signer == nil {
- signer, err = hashAndVerifyWithSubKeys(sig, token+"\n\n", key)
+
+ signer, err := hashAndVerifyWithSubKeys(sig, token, key)
if err != nil {
return "", ErrGPGInvalidTokenSignature{
ID: key.KeyID,
Wrapped: err,
}
}
- }
-
- if signer == nil {
- log.Error("Unable to validate token signature. Error: %v", err)
- return "", ErrGPGInvalidTokenSignature{
- ID: key.KeyID,
+ if signer == nil {
+ signer, err = hashAndVerifyWithSubKeys(sig, token+"\n", key)
+ if err != nil {
+ return "", ErrGPGInvalidTokenSignature{
+ ID: key.KeyID,
+ Wrapped: err,
+ }
+ }
+ }
+ if signer == nil {
+ signer, err = hashAndVerifyWithSubKeys(sig, token+"\n\n", key)
+ if err != nil {
+ return "", ErrGPGInvalidTokenSignature{
+ ID: key.KeyID,
+ Wrapped: err,
+ }
+ }
}
- }
- if signer.PrimaryKeyID != key.KeyID && signer.KeyID != key.KeyID {
- return "", ErrGPGKeyNotExist{}
- }
+ if signer == nil {
+ log.Debug("VerifyGPGKey failed: no signer")
+ return "", ErrGPGInvalidTokenSignature{
+ ID: key.KeyID,
+ }
+ }
- key.Verified = true
- if _, err := db.GetEngine(ctx).ID(key.ID).SetExpr("verified", true).Update(new(GPGKey)); err != nil {
- return "", err
- }
+ if signer.PrimaryKeyID != key.KeyID && signer.KeyID != key.KeyID {
+ return "", ErrGPGKeyNotExist{}
+ }
- if err := committer.Commit(); err != nil {
- return "", err
- }
+ key.Verified = true
+ if _, err := db.GetEngine(ctx).ID(key.ID).SetExpr("verified", true).Update(new(GPGKey)); err != nil {
+ return "", err
+ }
- return key.KeyID, nil
+ return key.KeyID, nil
+ })
}
// VerificationToken returns token for the user that will be valid in minutes (time)
diff --git a/models/asymkey/ssh_key.go b/models/asymkey/ssh_key.go
index 7a18732c32..87205f0651 100644
--- a/models/asymkey/ssh_key.go
+++ b/models/asymkey/ssh_key.go
@@ -99,40 +99,36 @@ func AddPublicKey(ctx context.Context, ownerID int64, name, content string, auth
return nil, err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- if err := checkKeyFingerprint(ctx, fingerprint); err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*PublicKey, error) {
+ if err := checkKeyFingerprint(ctx, fingerprint); err != nil {
+ return nil, err
+ }
- // Key name of same user cannot be duplicated.
- has, err := db.GetEngine(ctx).
- Where("owner_id = ? AND name = ?", ownerID, name).
- Get(new(PublicKey))
- if err != nil {
- return nil, err
- } else if has {
- return nil, ErrKeyNameAlreadyUsed{ownerID, name}
- }
+ // Key name of same user cannot be duplicated.
+ has, err := db.GetEngine(ctx).
+ Where("owner_id = ? AND name = ?", ownerID, name).
+ Get(new(PublicKey))
+ if err != nil {
+ return nil, err
+ } else if has {
+ return nil, ErrKeyNameAlreadyUsed{ownerID, name}
+ }
- key := &PublicKey{
- OwnerID: ownerID,
- Name: name,
- Fingerprint: fingerprint,
- Content: content,
- Mode: perm.AccessModeWrite,
- Type: KeyTypeUser,
- LoginSourceID: authSourceID,
- }
- if err = addKey(ctx, key); err != nil {
- return nil, fmt.Errorf("addKey: %w", err)
- }
+ key := &PublicKey{
+ OwnerID: ownerID,
+ Name: name,
+ Fingerprint: fingerprint,
+ Content: content,
+ Mode: perm.AccessModeWrite,
+ Type: KeyTypeUser,
+ LoginSourceID: authSourceID,
+ }
+ if err = addKey(ctx, key); err != nil {
+ return nil, fmt.Errorf("addKey: %w", err)
+ }
- return key, committer.Commit()
+ return key, nil
+ })
}
// GetPublicKeyByID returns public key by given ID.
@@ -288,33 +284,24 @@ func PublicKeyIsExternallyManaged(ctx context.Context, id int64) (bool, error) {
// deleteKeysMarkedForDeletion returns true if ssh keys needs update
func deleteKeysMarkedForDeletion(ctx context.Context, keys []string) (bool, error) {
- // Start session
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return false, err
- }
- defer committer.Close()
-
- // Delete keys marked for deletion
- var sshKeysNeedUpdate bool
- for _, KeyToDelete := range keys {
- key, err := SearchPublicKeyByContent(ctx, KeyToDelete)
- if err != nil {
- log.Error("SearchPublicKeyByContent: %v", err)
- continue
- }
- if _, err = db.DeleteByID[PublicKey](ctx, key.ID); err != nil {
- log.Error("DeleteByID[PublicKey]: %v", err)
- continue
+ return db.WithTx2(ctx, func(ctx context.Context) (bool, error) {
+ // Delete keys marked for deletion
+ var sshKeysNeedUpdate bool
+ for _, KeyToDelete := range keys {
+ key, err := SearchPublicKeyByContent(ctx, KeyToDelete)
+ if err != nil {
+ log.Error("SearchPublicKeyByContent: %v", err)
+ continue
+ }
+ if _, err = db.DeleteByID[PublicKey](ctx, key.ID); err != nil {
+ log.Error("DeleteByID[PublicKey]: %v", err)
+ continue
+ }
+ sshKeysNeedUpdate = true
}
- sshKeysNeedUpdate = true
- }
- if err := committer.Commit(); err != nil {
- return false, err
- }
-
- return sshKeysNeedUpdate, nil
+ return sshKeysNeedUpdate, nil
+ })
}
// AddPublicKeysBySource add a users public keys. Returns true if there are changes.
@@ -355,13 +342,13 @@ func AddPublicKeysBySource(ctx context.Context, usr *user_model.User, s *auth.So
return sshKeysNeedUpdate
}
-// SynchronizePublicKeys updates a users public keys. Returns true if there are changes.
+// SynchronizePublicKeys updates a user's public keys. Returns true if there are changes.
func SynchronizePublicKeys(ctx context.Context, usr *user_model.User, s *auth.Source, sshPublicKeys []string) bool {
var sshKeysNeedUpdate bool
log.Trace("synchronizePublicKeys[%s]: Handling Public SSH Key synchronization for user %s", s.Name, usr.Name)
- // Get Public Keys from DB with current LDAP source
+ // Get Public Keys from DB with the current auth source
var giteaKeys []string
keys, err := db.Find[PublicKey](ctx, FindPublicKeyOptions{
OwnerID: usr.ID,
diff --git a/models/asymkey/ssh_key_commit_verification.go b/models/asymkey/ssh_key_commit_verification.go
deleted file mode 100644
index 27c6df3578..0000000000
--- a/models/asymkey/ssh_key_commit_verification.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2021 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package asymkey
-
-import (
- "bytes"
- "context"
- "fmt"
- "strings"
-
- "code.gitea.io/gitea/models/db"
- user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/git"
- "code.gitea.io/gitea/modules/log"
-
- "github.com/42wim/sshsig"
-)
-
-// ParseCommitWithSSHSignature check if signature is good against keystore.
-func ParseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committer *user_model.User) *CommitVerification {
- // Now try to associate the signature with the committer, if present
- if committer.ID != 0 {
- keys, err := db.Find[PublicKey](ctx, FindPublicKeyOptions{
- OwnerID: committer.ID,
- NotKeytype: KeyTypePrincipal,
- })
- if err != nil { // Skipping failed to get ssh keys of user
- log.Error("ListPublicKeys: %v", err)
- return &CommitVerification{
- CommittingUser: committer,
- Verified: false,
- Reason: "gpg.error.failed_retrieval_gpg_keys",
- }
- }
-
- committerEmailAddresses, err := user_model.GetEmailAddresses(ctx, committer.ID)
- if err != nil {
- log.Error("GetEmailAddresses: %v", err)
- }
-
- activated := false
- for _, e := range committerEmailAddresses {
- if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
- activated = true
- break
- }
- }
-
- for _, k := range keys {
- if k.Verified && activated {
- commitVerification := verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, k, committer, committer, c.Committer.Email)
- if commitVerification != nil {
- return commitVerification
- }
- }
- }
- }
-
- return &CommitVerification{
- CommittingUser: committer,
- Verified: false,
- Reason: NoKeyFound,
- }
-}
-
-func verifySSHCommitVerification(sig, payload string, k *PublicKey, committer, signer *user_model.User, email string) *CommitVerification {
- if err := sshsig.Verify(bytes.NewBuffer([]byte(payload)), []byte(sig), []byte(k.Content), "git"); err != nil {
- return nil
- }
-
- return &CommitVerification{ // Everything is ok
- CommittingUser: committer,
- Verified: true,
- Reason: fmt.Sprintf("%s / %s", signer.Name, k.Fingerprint),
- SigningUser: signer,
- SigningSSHKey: k,
- SigningEmail: email,
- }
-}
diff --git a/models/asymkey/ssh_key_deploy.go b/models/asymkey/ssh_key_deploy.go
index 923c5020ed..4ab84eabcf 100644
--- a/models/asymkey/ssh_key_deploy.go
+++ b/models/asymkey/ssh_key_deploy.go
@@ -125,39 +125,35 @@ func AddDeployKey(ctx context.Context, repoID int64, name, content string, readO
accessMode = perm.AccessModeWrite
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- pkey, exist, err := db.Get[PublicKey](ctx, builder.Eq{"fingerprint": fingerprint})
- if err != nil {
- return nil, err
- } else if exist {
- if pkey.Type != KeyTypeDeploy {
- return nil, ErrKeyAlreadyExist{0, fingerprint, ""}
- }
- } else {
- // First time use this deploy key.
- pkey = &PublicKey{
- Fingerprint: fingerprint,
- Mode: accessMode,
- Type: KeyTypeDeploy,
- Content: content,
- Name: name,
+ return db.WithTx2(ctx, func(ctx context.Context) (*DeployKey, error) {
+ pkey, exist, err := db.Get[PublicKey](ctx, builder.Eq{"fingerprint": fingerprint})
+ if err != nil {
+ return nil, err
+ } else if exist {
+ if pkey.Type != KeyTypeDeploy {
+ return nil, ErrKeyAlreadyExist{0, fingerprint, ""}
+ }
+ } else {
+ // First time use this deploy key.
+ pkey = &PublicKey{
+ Fingerprint: fingerprint,
+ Mode: accessMode,
+ Type: KeyTypeDeploy,
+ Content: content,
+ Name: name,
+ }
+ if err = addKey(ctx, pkey); err != nil {
+ return nil, fmt.Errorf("addKey: %w", err)
+ }
}
- if err = addKey(ctx, pkey); err != nil {
- return nil, fmt.Errorf("addKey: %w", err)
- }
- }
- key, err := addDeployKey(ctx, pkey.ID, repoID, name, pkey.Fingerprint, accessMode)
- if err != nil {
- return nil, err
- }
+ key, err := addDeployKey(ctx, pkey.ID, repoID, name, pkey.Fingerprint, accessMode)
+ if err != nil {
+ return nil, err
+ }
- return key, committer.Commit()
+ return key, nil
+ })
}
// GetDeployKeyByID returns deploy key by given ID.
diff --git a/models/asymkey/ssh_key_fingerprint.go b/models/asymkey/ssh_key_fingerprint.go
index 1ed3b5df2a..b666469ae8 100644
--- a/models/asymkey/ssh_key_fingerprint.go
+++ b/models/asymkey/ssh_key_fingerprint.go
@@ -6,30 +6,16 @@ package asymkey
import (
"context"
"fmt"
- "strings"
"code.gitea.io/gitea/models/db"
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/process"
- "code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
"golang.org/x/crypto/ssh"
"xorm.io/builder"
)
-// ___________.__ .__ __
-// \_ _____/|__| ____ ____ ________________________|__| _____/ |_
-// | __) | |/ \ / ___\_/ __ \_ __ \____ \_ __ \ |/ \ __\
-// | \ | | | \/ /_/ > ___/| | \/ |_> > | \/ | | \ |
-// \___ / |__|___| /\___ / \___ >__| | __/|__| |__|___| /__|
-// \/ \//_____/ \/ |__| \/
-//
-// This file contains functions for fingerprinting SSH keys
-//
-// The database is used in checkKeyFingerprint however most of these functions probably belong in a module
+// The database is used in checkKeyFingerprint. However, most of these functions probably belong in a module
-// checkKeyFingerprint only checks if key fingerprint has been used as public key,
+// checkKeyFingerprint only checks whether the key fingerprint has already been used as a public key,
// it is OK to use same key as deploy key for multiple repositories/users.
func checkKeyFingerprint(ctx context.Context, fingerprint string) error {
has, err := db.Exist[PublicKey](ctx, builder.Eq{"fingerprint": fingerprint})
@@ -41,29 +27,6 @@ func checkKeyFingerprint(ctx context.Context, fingerprint string) error {
return nil
}
-func calcFingerprintSSHKeygen(publicKeyContent string) (string, error) {
- // Calculate fingerprint.
- tmpPath, err := writeTmpKeyFile(publicKeyContent)
- if err != nil {
- return "", err
- }
- defer func() {
- if err := util.Remove(tmpPath); err != nil {
- log.Warn("Unable to remove temporary key file: %s: Error: %v", tmpPath, err)
- }
- }()
- stdout, stderr, err := process.GetManager().Exec("AddPublicKey", "ssh-keygen", "-lf", tmpPath)
- if err != nil {
- if strings.Contains(stderr, "is not a public key file") {
- return "", ErrKeyUnableVerify{stderr}
- }
- return "", util.NewInvalidArgumentErrorf("'ssh-keygen -lf %s' failed with error '%s': %s", tmpPath, err, stderr)
- } else if len(stdout) < 2 {
- return "", util.NewInvalidArgumentErrorf("not enough output for calculating fingerprint: %s", stdout)
- }
- return strings.Split(stdout, " ")[1], nil
-}
-
func calcFingerprintNative(publicKeyContent string) (string, error) {
// Calculate fingerprint.
pk, _, _, _, err := ssh.ParseAuthorizedKey([]byte(publicKeyContent))
@@ -75,15 +38,12 @@ func calcFingerprintNative(publicKeyContent string) (string, error) {
// CalcFingerprint calculate public key's fingerprint
func CalcFingerprint(publicKeyContent string) (string, error) {
- // Call the method based on configuration
- useNative := setting.SSH.KeygenPath == ""
- calcFn := util.Iif(useNative, calcFingerprintNative, calcFingerprintSSHKeygen)
- fp, err := calcFn(publicKeyContent)
+ fp, err := calcFingerprintNative(publicKeyContent)
if err != nil {
if IsErrKeyUnableVerify(err) {
return "", err
}
- return "", fmt.Errorf("CalcFingerprint(%s): %w", util.Iif(useNative, "native", "ssh-keygen"), err)
+ return "", fmt.Errorf("CalcFingerprint: %w", err)
}
return fp, nil
}
diff --git a/models/asymkey/ssh_key_parse.go b/models/asymkey/ssh_key_parse.go
index 94b1cf112b..fc39f28624 100644
--- a/models/asymkey/ssh_key_parse.go
+++ b/models/asymkey/ssh_key_parse.go
@@ -10,14 +10,12 @@ import (
"encoding/base64"
"encoding/binary"
"encoding/pem"
+ "errors"
"fmt"
"math/big"
- "os"
- "strconv"
"strings"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -93,7 +91,7 @@ func parseKeyString(content string) (string, error) {
block, _ := pem.Decode([]byte(content))
if block == nil {
- return "", fmt.Errorf("failed to parse PEM block containing the public key")
+ return "", errors.New("failed to parse PEM block containing the public key")
}
if strings.Contains(block.Type, "PRIVATE") {
return "", ErrKeyIsPrivate
@@ -174,20 +172,9 @@ func CheckPublicKeyString(content string) (_ string, err error) {
return content, nil
}
- var (
- fnName string
- keyType string
- length int
- )
- if len(setting.SSH.KeygenPath) == 0 {
- fnName = "SSHNativeParsePublicKey"
- keyType, length, err = SSHNativeParsePublicKey(content)
- } else {
- fnName = "SSHKeyGenParsePublicKey"
- keyType, length, err = SSHKeyGenParsePublicKey(content)
- }
+ keyType, length, err := SSHNativeParsePublicKey(content)
if err != nil {
- return "", fmt.Errorf("%s: %w", fnName, err)
+ return "", fmt.Errorf("SSHNativeParsePublicKey: %w", err)
}
log.Trace("Key info [native: %v]: %s-%d", setting.SSH.StartBuiltinServer, keyType, length)
@@ -221,7 +208,7 @@ func SSHNativeParsePublicKey(keyLine string) (string, int, error) {
// The ssh library can parse the key, so next we find out what key exactly we have.
switch pkey.Type() {
- case ssh.KeyAlgoDSA:
+ case ssh.KeyAlgoDSA: //nolint:staticcheck // it's deprecated
rawPub := struct {
Name string
P, Q, G, Y *big.Int
@@ -257,56 +244,3 @@ func SSHNativeParsePublicKey(keyLine string) (string, int, error) {
}
return "", 0, fmt.Errorf("unsupported key length detection for type: %s", pkey.Type())
}
-
-// writeTmpKeyFile writes key content to a temporary file
-// and returns the name of that file, along with any possible errors.
-func writeTmpKeyFile(content string) (string, error) {
- tmpFile, err := os.CreateTemp(setting.SSH.KeyTestPath, "gitea_keytest")
- if err != nil {
- return "", fmt.Errorf("TempFile: %w", err)
- }
- defer tmpFile.Close()
-
- if _, err = tmpFile.WriteString(content); err != nil {
- return "", fmt.Errorf("WriteString: %w", err)
- }
- return tmpFile.Name(), nil
-}
-
-// SSHKeyGenParsePublicKey extracts key type and length using ssh-keygen.
-func SSHKeyGenParsePublicKey(key string) (string, int, error) {
- tmpName, err := writeTmpKeyFile(key)
- if err != nil {
- return "", 0, fmt.Errorf("writeTmpKeyFile: %w", err)
- }
- defer func() {
- if err := util.Remove(tmpName); err != nil {
- log.Warn("Unable to remove temporary key file: %s: Error: %v", tmpName, err)
- }
- }()
-
- keygenPath := setting.SSH.KeygenPath
- if len(keygenPath) == 0 {
- keygenPath = "ssh-keygen"
- }
-
- stdout, stderr, err := process.GetManager().Exec("SSHKeyGenParsePublicKey", keygenPath, "-lf", tmpName)
- if err != nil {
- return "", 0, fmt.Errorf("fail to parse public key: %s - %s", err, stderr)
- }
- if strings.Contains(stdout, "is not a public key file") {
- return "", 0, ErrKeyUnableVerify{stdout}
- }
-
- fields := strings.Split(stdout, " ")
- if len(fields) < 4 {
- return "", 0, fmt.Errorf("invalid public key line: %s", stdout)
- }
-
- keyType := strings.Trim(fields[len(fields)-1], "()\r\n")
- length, err := strconv.ParseInt(fields[0], 10, 32)
- if err != nil {
- return "", 0, err
- }
- return strings.ToLower(keyType), int(length), nil
-}
diff --git a/models/asymkey/ssh_key_test.go b/models/asymkey/ssh_key_test.go
index 3650f1892f..21e4ddf62e 100644
--- a/models/asymkey/ssh_key_test.go
+++ b/models/asymkey/ssh_key_test.go
@@ -42,29 +42,7 @@ func Test_SSHParsePublicKey(t *testing.T) {
keyTypeN, lengthN, err := SSHNativeParsePublicKey(tc.content)
assert.NoError(t, err)
assert.Equal(t, tc.keyType, keyTypeN)
- assert.EqualValues(t, tc.length, lengthN)
- })
- if tc.skipSSHKeygen {
- return
- }
- t.Run("SSHKeygen", func(t *testing.T) {
- keyTypeK, lengthK, err := SSHKeyGenParsePublicKey(tc.content)
- if err != nil {
- // Some servers do not support ecdsa format.
- if !strings.Contains(err.Error(), "line 1 too long:") {
- assert.FailNow(t, "%v", err)
- }
- }
- assert.Equal(t, tc.keyType, keyTypeK)
- assert.EqualValues(t, tc.length, lengthK)
- })
- t.Run("SSHParseKeyNative", func(t *testing.T) {
- keyTypeK, lengthK, err := SSHNativeParsePublicKey(tc.content)
- if err != nil {
- assert.FailNow(t, "%v", err)
- }
- assert.Equal(t, tc.keyType, keyTypeK)
- assert.EqualValues(t, tc.length, lengthK)
+ assert.Equal(t, tc.length, lengthN)
})
})
}
@@ -186,14 +164,6 @@ func Test_calcFingerprint(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, tc.fp, fpN)
})
- if tc.skipSSHKeygen {
- return
- }
- t.Run("SSHKeygen", func(t *testing.T) {
- fpK, err := calcFingerprintSSHKeygen(tc.content)
- assert.NoError(t, err)
- assert.Equal(t, tc.fp, fpK)
- })
})
}
}
diff --git a/models/asymkey/ssh_key_verify.go b/models/asymkey/ssh_key_verify.go
index 208288c77b..04917239ee 100644
--- a/models/asymkey/ssh_key_verify.go
+++ b/models/asymkey/ssh_key_verify.go
@@ -4,8 +4,8 @@
package asymkey
import (
- "bytes"
"context"
+ "strings"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
@@ -15,41 +15,33 @@ import (
// VerifySSHKey marks a SSH key as verified
func VerifySSHKey(ctx context.Context, ownerID int64, fingerprint, token, signature string) (string, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return "", err
- }
- defer committer.Close()
-
- key := new(PublicKey)
-
- has, err := db.GetEngine(ctx).Where("owner_id = ? AND fingerprint = ?", ownerID, fingerprint).Get(key)
- if err != nil {
- return "", err
- } else if !has {
- return "", ErrKeyNotExist{}
- }
-
- err = sshsig.Verify(bytes.NewBuffer([]byte(token)), []byte(signature), []byte(key.Content), "gitea")
- if err != nil {
- // edge case for Windows based shells that will add CR LF if piped to ssh-keygen command
- // see https://github.com/PowerShell/PowerShell/issues/5974
- if sshsig.Verify(bytes.NewBuffer([]byte(token+"\r\n")), []byte(signature), []byte(key.Content), "gitea") != nil {
- log.Error("Unable to validate token signature. Error: %v", err)
- return "", ErrSSHInvalidTokenSignature{
- Fingerprint: key.Fingerprint,
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (string, error) {
+ key := new(PublicKey)
+
+ has, err := db.GetEngine(ctx).Where("owner_id = ? AND fingerprint = ?", ownerID, fingerprint).Get(key)
+ if err != nil {
+ return "", err
+ } else if !has {
+ return "", ErrKeyNotExist{}
}
- }
- key.Verified = true
- if _, err := db.GetEngine(ctx).ID(key.ID).Cols("verified").Update(key); err != nil {
- return "", err
- }
+ err = sshsig.Verify(strings.NewReader(token), []byte(signature), []byte(key.Content), "gitea")
+ if err != nil {
+ // edge case for Windows-based shells that add CR LF when piped to the ssh-keygen command
+ // see https://github.com/PowerShell/PowerShell/issues/5974
+ if sshsig.Verify(strings.NewReader(token+"\r\n"), []byte(signature), []byte(key.Content), "gitea") != nil {
+ log.Debug("VerifySSHKey sshsig.Verify failed: %v", err)
+ return "", ErrSSHInvalidTokenSignature{
+ Fingerprint: key.Fingerprint,
+ }
+ }
+ }
- if err := committer.Commit(); err != nil {
- return "", err
- }
+ key.Verified = true
+ if _, err := db.GetEngine(ctx).ID(key.ID).Cols("verified").Update(key); err != nil {
+ return "", err
+ }
- return key.Fingerprint, nil
+ return key.Fingerprint, nil
+ })
}
diff --git a/models/auth/access_token_scope.go b/models/auth/access_token_scope.go
index 0e5b2e96e6..3eae19b2a5 100644
--- a/models/auth/access_token_scope.go
+++ b/models/auth/access_token_scope.go
@@ -213,12 +213,7 @@ func GetRequiredScopes(level AccessTokenScopeLevel, scopeCategories ...AccessTok
// ContainsCategory checks if a list of categories contains a specific category
func ContainsCategory(categories []AccessTokenScopeCategory, category AccessTokenScopeCategory) bool {
- for _, c := range categories {
- if c == category {
- return true
- }
- }
- return false
+ return slices.Contains(categories, category)
}
// GetScopeLevelFromAccessMode converts permission access mode to scope level
@@ -295,6 +290,10 @@ func (s AccessTokenScope) Normalize() (AccessTokenScope, error) {
return bitmap.toScope(), nil
}
+func (s AccessTokenScope) HasPermissionScope() bool {
+ return s != "" && s != AccessTokenScopePublicOnly
+}
+
// PublicOnly checks if this token scope is limited to public resources
func (s AccessTokenScope) PublicOnly() (bool, error) {
bitmap, err := s.parse()
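For illustration, a tiny sketch of what the new HasPermissionScope helper distinguishes (not part of the patch; "write:repository" is assumed to be a valid scope string and the alias is illustrative):

package example

import auth_model "code.gitea.io/gitea/models/auth"

// scopeGrantsSomething is true for scopes that carry real permissions and
// false for an empty scope or the bare public-only marker.
func scopeGrantsSomething(s auth_model.AccessTokenScope) bool {
	return s.HasPermissionScope()
}

var (
	_ = scopeGrantsSomething(auth_model.AccessTokenScope("write:repository")) // true (assumed valid scope)
	_ = scopeGrantsSomething(auth_model.AccessTokenScopePublicOnly)           // false
	_ = scopeGrantsSomething("")                                              // false
)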
diff --git a/models/auth/access_token_scope_test.go b/models/auth/access_token_scope_test.go
index 9e4aa83633..b93c25528f 100644
--- a/models/auth/access_token_scope_test.go
+++ b/models/auth/access_token_scope_test.go
@@ -28,11 +28,11 @@ func TestAccessTokenScope_Normalize(t *testing.T) {
for _, scope := range GetAccessTokenCategories() {
tests = append(tests,
- scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%s", scope)), AccessTokenScope(fmt.Sprintf("read:%s", scope)), nil},
- scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil},
- scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%[1]s,read:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil},
- scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil},
- scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s,write:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil},
+ scopeTestNormalize{AccessTokenScope("read:" + scope), AccessTokenScope("read:" + scope), nil},
+ scopeTestNormalize{AccessTokenScope("write:" + scope), AccessTokenScope("write:" + scope), nil},
+ scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%[1]s,read:%[1]s", scope)), AccessTokenScope("write:" + scope), nil},
+ scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s", scope)), AccessTokenScope("write:" + scope), nil},
+ scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s,write:%[1]s", scope)), AccessTokenScope("write:" + scope), nil},
)
}
@@ -63,20 +63,20 @@ func TestAccessTokenScope_HasScope(t *testing.T) {
for _, scope := range GetAccessTokenCategories() {
tests = append(tests,
scopeTestHasScope{
- AccessTokenScope(fmt.Sprintf("read:%s", scope)),
- AccessTokenScope(fmt.Sprintf("read:%s", scope)), true, nil,
+ AccessTokenScope("read:" + scope),
+ AccessTokenScope("read:" + scope), true, nil,
},
scopeTestHasScope{
- AccessTokenScope(fmt.Sprintf("write:%s", scope)),
- AccessTokenScope(fmt.Sprintf("write:%s", scope)), true, nil,
+ AccessTokenScope("write:" + scope),
+ AccessTokenScope("write:" + scope), true, nil,
},
scopeTestHasScope{
- AccessTokenScope(fmt.Sprintf("write:%s", scope)),
- AccessTokenScope(fmt.Sprintf("read:%s", scope)), true, nil,
+ AccessTokenScope("write:" + scope),
+ AccessTokenScope("read:" + scope), true, nil,
},
scopeTestHasScope{
- AccessTokenScope(fmt.Sprintf("read:%s", scope)),
- AccessTokenScope(fmt.Sprintf("write:%s", scope)), false, nil,
+ AccessTokenScope("read:" + scope),
+ AccessTokenScope("write:" + scope), false, nil,
},
)
}
diff --git a/models/auth/auth_token.go b/models/auth/auth_token.go
index 81f07d1a83..54ff5a0d75 100644
--- a/models/auth/auth_token.go
+++ b/models/auth/auth_token.go
@@ -15,7 +15,7 @@ import (
var ErrAuthTokenNotExist = util.NewNotExistErrorf("auth token does not exist")
-type AuthToken struct { //nolint:revive
+type AuthToken struct { //nolint:revive // export stutter
ID string `xorm:"pk"`
TokenHash string
UserID int64 `xorm:"INDEX"`
diff --git a/models/auth/oauth2.go b/models/auth/oauth2.go
index c270e4856e..d664841306 100644
--- a/models/auth/oauth2.go
+++ b/models/auth/oauth2.go
@@ -12,6 +12,7 @@ import (
"fmt"
"net"
"net/url"
+ "slices"
"strings"
"code.gitea.io/gitea/models/db"
@@ -288,35 +289,31 @@ type UpdateOAuth2ApplicationOptions struct {
// UpdateOAuth2Application updates an oauth2 application
func UpdateOAuth2Application(ctx context.Context, opts UpdateOAuth2ApplicationOptions) (*OAuth2Application, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- app, err := GetOAuth2ApplicationByID(ctx, opts.ID)
- if err != nil {
- return nil, err
- }
- if app.UID != opts.UserID {
- return nil, errors.New("UID mismatch")
- }
- builtinApps := BuiltinApplications()
- if _, builtin := builtinApps[app.ClientID]; builtin {
- return nil, fmt.Errorf("failed to edit OAuth2 application: application is locked: %s", app.ClientID)
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*OAuth2Application, error) {
+ app, err := GetOAuth2ApplicationByID(ctx, opts.ID)
+ if err != nil {
+ return nil, err
+ }
+ if app.UID != opts.UserID {
+ return nil, errors.New("UID mismatch")
+ }
+ builtinApps := BuiltinApplications()
+ if _, builtin := builtinApps[app.ClientID]; builtin {
+ return nil, fmt.Errorf("failed to edit OAuth2 application: application is locked: %s", app.ClientID)
+ }
- app.Name = opts.Name
- app.RedirectURIs = opts.RedirectURIs
- app.ConfidentialClient = opts.ConfidentialClient
- app.SkipSecondaryAuthorization = opts.SkipSecondaryAuthorization
+ app.Name = opts.Name
+ app.RedirectURIs = opts.RedirectURIs
+ app.ConfidentialClient = opts.ConfidentialClient
+ app.SkipSecondaryAuthorization = opts.SkipSecondaryAuthorization
- if err = updateOAuth2Application(ctx, app); err != nil {
- return nil, err
- }
- app.ClientSecret = ""
+ if err = updateOAuth2Application(ctx, app); err != nil {
+ return nil, err
+ }
+ app.ClientSecret = ""
- return app, committer.Commit()
+ return app, nil
+ })
}
func updateOAuth2Application(ctx context.Context, app *OAuth2Application) error {
@@ -357,23 +354,17 @@ func deleteOAuth2Application(ctx context.Context, id, userid int64) error {
// DeleteOAuth2Application deletes the application with the given id and the grants and auth codes related to it. It checks if the userid was the creator of the app.
func DeleteOAuth2Application(ctx context.Context, id, userid int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- app, err := GetOAuth2ApplicationByID(ctx, id)
- if err != nil {
- return err
- }
- builtinApps := BuiltinApplications()
- if _, builtin := builtinApps[app.ClientID]; builtin {
- return fmt.Errorf("failed to delete OAuth2 application: application is locked: %s", app.ClientID)
- }
- if err := deleteOAuth2Application(ctx, id, userid); err != nil {
- return err
- }
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ app, err := GetOAuth2ApplicationByID(ctx, id)
+ if err != nil {
+ return err
+ }
+ builtinApps := BuiltinApplications()
+ if _, builtin := builtinApps[app.ClientID]; builtin {
+ return fmt.Errorf("failed to delete OAuth2 application: application is locked: %s", app.ClientID)
+ }
+ return deleteOAuth2Application(ctx, id, userid)
+ })
}
//////////////////////////////////////////////////////
@@ -511,12 +502,7 @@ func (grant *OAuth2Grant) IncreaseCounter(ctx context.Context) error {
// ScopeContains returns true if the grant scope contains the specified scope
func (grant *OAuth2Grant) ScopeContains(scope string) bool {
- for _, currentScope := range strings.Split(grant.Scope, " ") {
- if scope == currentScope {
- return true
- }
- }
- return false
+ return slices.Contains(strings.Split(grant.Scope, " "), scope)
}
// SetNonce updates the current nonce value of a grant
@@ -616,8 +602,8 @@ func (err ErrOAuthApplicationNotFound) Unwrap() error {
return util.ErrNotExist
}
-// GetActiveOAuth2SourceByName returns a OAuth2 AuthSource based on the given name
-func GetActiveOAuth2SourceByName(ctx context.Context, name string) (*Source, error) {
+// GetActiveOAuth2SourceByAuthName returns an OAuth2 AuthSource based on the given name
+func GetActiveOAuth2SourceByAuthName(ctx context.Context, name string) (*Source, error) {
authSource := new(Source)
has, err := db.GetEngine(ctx).Where("name = ? and type = ? and is_active = ?", name, OAuth2, true).Get(authSource)
if err != nil {
diff --git a/models/auth/oauth2_test.go b/models/auth/oauth2_test.go
index fa89a58b14..c6626b283e 100644
--- a/models/auth/oauth2_test.go
+++ b/models/auth/oauth2_test.go
@@ -126,7 +126,7 @@ func TestOAuth2Application_CreateGrant(t *testing.T) {
assert.NotNil(t, grant)
assert.Equal(t, int64(2), grant.UserID)
assert.Equal(t, int64(1), grant.ApplicationID)
- assert.Equal(t, "", grant.Scope)
+ assert.Empty(t, grant.Scope)
}
//////////////////// Grant
diff --git a/models/auth/session.go b/models/auth/session.go
index 75a205f702..0378d0ec6f 100644
--- a/models/auth/session.go
+++ b/models/auth/session.go
@@ -35,26 +35,22 @@ func UpdateSession(ctx context.Context, key string, data []byte) error {
// ReadSession reads the data for the provided session
func ReadSession(ctx context.Context, key string) (*Session, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- session, exist, err := db.Get[Session](ctx, builder.Eq{"`key`": key})
- if err != nil {
- return nil, err
- } else if !exist {
- session = &Session{
- Key: key,
- Expiry: timeutil.TimeStampNow(),
- }
- if err := db.Insert(ctx, session); err != nil {
+ return db.WithTx2(ctx, func(ctx context.Context) (*Session, error) {
+ session, exist, err := db.Get[Session](ctx, builder.Eq{"`key`": key})
+ if err != nil {
return nil, err
+ } else if !exist {
+ session = &Session{
+ Key: key,
+ Expiry: timeutil.TimeStampNow(),
+ }
+ if err := db.Insert(ctx, session); err != nil {
+ return nil, err
+ }
}
- }
- return session, committer.Commit()
+ return session, nil
+ })
}
// ExistSession checks if a session exists
@@ -72,40 +68,36 @@ func DestroySession(ctx context.Context, key string) error {
// RegenerateSession regenerates a session from the old id
func RegenerateSession(ctx context.Context, oldKey, newKey string) (*Session, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- if has, err := db.Exist[Session](ctx, builder.Eq{"`key`": newKey}); err != nil {
- return nil, err
- } else if has {
- return nil, fmt.Errorf("session Key: %s already exists", newKey)
- }
-
- if has, err := db.Exist[Session](ctx, builder.Eq{"`key`": oldKey}); err != nil {
- return nil, err
- } else if !has {
- if err := db.Insert(ctx, &Session{
- Key: oldKey,
- Expiry: timeutil.TimeStampNow(),
- }); err != nil {
+ return db.WithTx2(ctx, func(ctx context.Context) (*Session, error) {
+ if has, err := db.Exist[Session](ctx, builder.Eq{"`key`": newKey}); err != nil {
+ return nil, err
+ } else if has {
+ return nil, fmt.Errorf("session Key: %s already exists", newKey)
+ }
+
+ if has, err := db.Exist[Session](ctx, builder.Eq{"`key`": oldKey}); err != nil {
return nil, err
+ } else if !has {
+ if err := db.Insert(ctx, &Session{
+ Key: oldKey,
+ Expiry: timeutil.TimeStampNow(),
+ }); err != nil {
+ return nil, err
+ }
}
- }
- if _, err := db.Exec(ctx, "UPDATE "+db.TableName(&Session{})+" SET `key` = ? WHERE `key`=?", newKey, oldKey); err != nil {
- return nil, err
- }
+ if _, err := db.Exec(ctx, "UPDATE "+db.TableName(&Session{})+" SET `key` = ? WHERE `key`=?", newKey, oldKey); err != nil {
+ return nil, err
+ }
- s, _, err := db.Get[Session](ctx, builder.Eq{"`key`": newKey})
- if err != nil {
- // is not exist, it should be impossible
- return nil, err
- }
+ s, _, err := db.Get[Session](ctx, builder.Eq{"`key`": newKey})
+ if err != nil {
+ // the session was just renamed to newKey, so not finding it here should be impossible
+ return nil, err
+ }
- return s, committer.Commit()
+ return s, nil
+ })
}
// CountSessions returns the number of sessions
diff --git a/models/auth/source.go b/models/auth/source.go
index a3a250cd91..08cfc9615b 100644
--- a/models/auth/source.go
+++ b/models/auth/source.go
@@ -58,6 +58,15 @@ var Names = map[Type]string{
// Config represents login config as far as the db is concerned
type Config interface {
convert.Conversion
+ SetAuthSource(*Source)
+}
+
+type ConfigBase struct {
+ AuthSource *Source
+}
+
+func (p *ConfigBase) SetAuthSource(s *Source) {
+ p.AuthSource = s
}
// SkipVerifiable configurations provide a IsSkipVerify to check if SkipVerify is set
@@ -104,19 +113,15 @@ func RegisterTypeConfig(typ Type, exemplar Config) {
}
}
-// SourceSettable configurations can have their authSource set on them
-type SourceSettable interface {
- SetAuthSource(*Source)
-}
-
// Source represents an external way for authorizing users.
type Source struct {
- ID int64 `xorm:"pk autoincr"`
- Type Type
- Name string `xorm:"UNIQUE"`
- IsActive bool `xorm:"INDEX NOT NULL DEFAULT false"`
- IsSyncEnabled bool `xorm:"INDEX NOT NULL DEFAULT false"`
- Cfg convert.Conversion `xorm:"TEXT"`
+ ID int64 `xorm:"pk autoincr"`
+ Type Type
+ Name string `xorm:"UNIQUE"`
+ IsActive bool `xorm:"INDEX NOT NULL DEFAULT false"`
+ IsSyncEnabled bool `xorm:"INDEX NOT NULL DEFAULT false"`
+ TwoFactorPolicy string `xorm:"two_factor_policy NOT NULL DEFAULT ''"`
+ Cfg Config `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
@@ -140,9 +145,7 @@ func (source *Source) BeforeSet(colName string, val xorm.Cell) {
return
}
source.Cfg = constructor()
- if settable, ok := source.Cfg.(SourceSettable); ok {
- settable.SetAuthSource(source)
- }
+ source.Cfg.SetAuthSource(source)
}
}
@@ -200,6 +203,10 @@ func (source *Source) SkipVerify() bool {
return ok && skipVerifiable.IsSkipVerify()
}
+func (source *Source) TwoFactorShouldSkip() bool {
+ return source.TwoFactorPolicy == "skip"
+}
+
// CreateSource inserts a AuthSource in the DB if not already
// existing with the given name.
func CreateSource(ctx context.Context, source *Source) error {
@@ -223,9 +230,7 @@ func CreateSource(ctx context.Context, source *Source) error {
return nil
}
- if settable, ok := source.Cfg.(SourceSettable); ok {
- settable.SetAuthSource(source)
- }
+ source.Cfg.SetAuthSource(source)
registerableSource, ok := source.Cfg.(RegisterableSource)
if !ok {
@@ -320,9 +325,7 @@ func UpdateSource(ctx context.Context, source *Source) error {
return nil
}
- if settable, ok := source.Cfg.(SourceSettable); ok {
- settable.SetAuthSource(source)
- }
+ source.Cfg.SetAuthSource(source)
registerableSource, ok := source.Cfg.(RegisterableSource)
if !ok {
@@ -331,7 +334,7 @@ func UpdateSource(ctx context.Context, source *Source) error {
err = registerableSource.RegisterSource()
if err != nil {
- // restore original values since we cannot update the provider it self
+ // restore original values since we cannot update the provider itself
if _, err := db.GetEngine(ctx).ID(source.ID).AllCols().Update(originalSource); err != nil {
log.Error("UpdateSource: Error while wrapOpenIDConnectInitializeError: %v", err)
}
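For context, a sketch of what an auth source Config looks like once SetAuthSource is part of the Config interface (not part of the patch; the JSON-backed Conversion methods and the Endpoint field are assumptions modelled on existing sources):

package example

import (
	"encoding/json"

	auth_model "code.gitea.io/gitea/models/auth"
)

// MySourceCfg embeds ConfigBase, so SetAuthSource is inherited and the old
// SourceSettable type assertion is no longer needed at the call sites.
type MySourceCfg struct {
	auth_model.ConfigBase

	Endpoint string
}

// FromDB and ToDB satisfy the convert.Conversion half of auth_model.Config.
func (c *MySourceCfg) FromDB(bs []byte) error { return json.Unmarshal(bs, c) }
func (c *MySourceCfg) ToDB() ([]byte, error)  { return json.Marshal(c) }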
diff --git a/models/auth/source_test.go b/models/auth/source_test.go
index 84aede0a6b..64c7460b64 100644
--- a/models/auth/source_test.go
+++ b/models/auth/source_test.go
@@ -19,6 +19,8 @@ import (
)
type TestSource struct {
+ auth_model.ConfigBase
+
Provider string
ClientID string
ClientSecret string
diff --git a/models/auth/twofactor.go b/models/auth/twofactor.go
index d0c341a192..200ce7c7c0 100644
--- a/models/auth/twofactor.go
+++ b/models/auth/twofactor.go
@@ -164,3 +164,13 @@ func DeleteTwoFactorByID(ctx context.Context, id, userID int64) error {
}
return nil
}
+
+func HasTwoFactorOrWebAuthn(ctx context.Context, id int64) (bool, error) {
+ has, err := HasTwoFactorByUID(ctx, id)
+ if err != nil {
+ return false, err
+ } else if has {
+ return true, nil
+ }
+ return HasWebAuthnRegistrationsByUID(ctx, id)
+}
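For context, a short caller sketch of the combined check (not part of the patch; the wrapper function is an assumption, while both called helpers appear in this file):

package example

import (
	"context"

	auth_model "code.gitea.io/gitea/models/auth"
)

// needsSecondFactor replaces separate calls to HasTwoFactorByUID and
// HasWebAuthnRegistrationsByUID with the single combined helper.
func needsSecondFactor(ctx context.Context, userID int64) (bool, error) {
	return auth_model.HasTwoFactorOrWebAuthn(ctx, userID)
}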
diff --git a/models/db/context.go b/models/db/context.go
index 4b98796ef0..ad99ada8c8 100644
--- a/models/db/context.go
+++ b/models/db/context.go
@@ -67,7 +67,7 @@ func contextSafetyCheck(e Engine) {
_ = e.SQL("SELECT 1").Iterate(&m{}, func(int, any) error {
callers := make([]uintptr, 32)
callerNum := runtime.Callers(1, callers)
- for i := 0; i < callerNum; i++ {
+ for i := range callerNum {
if funcName := runtime.FuncForPC(callers[i]).Name(); funcName == "xorm.io/xorm.(*Session).Iterate" {
contextSafetyDeniedFuncPCs = append(contextSafetyDeniedFuncPCs, callers[i])
}
@@ -82,7 +82,7 @@ func contextSafetyCheck(e Engine) {
// it should be very fast: xxxx ns/op
callers := make([]uintptr, 32)
callerNum := runtime.Callers(3, callers) // skip 3: runtime.Callers, contextSafetyCheck, GetEngine
- for i := 0; i < callerNum; i++ {
+ for i := range callerNum {
if slices.Contains(contextSafetyDeniedFuncPCs, callers[i]) {
panic(errors.New("using database context in an iterator would cause corrupted results"))
}
@@ -178,6 +178,15 @@ func WithTx(parentCtx context.Context, f func(ctx context.Context) error) error
return txWithNoCheck(parentCtx, f)
}
+// WithTx2 is similar to WithTx, but it has two return values: result and error.
+func WithTx2[T any](parentCtx context.Context, f func(ctx context.Context) (T, error)) (ret T, errRet error) {
+ errRet = WithTx(parentCtx, func(ctx context.Context) (errInner error) {
+ ret, errInner = f(ctx)
+ return errInner
+ })
+ return ret, errRet
+}
+
func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error) error {
sess := xormEngine.NewSession()
defer sess.Close()
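WithTx2 is a thin generic wrapper over WithTx for callers that need to return a value from the transaction. A minimal usage sketch; the Widget type and createWidget helper are illustrative, not part of Gitea:

package example

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

type Widget struct {
	ID   int64 `xorm:"pk autoincr"`
	Name string
}

// createWidget inserts a row inside a transaction and returns it, using the
// generic WithTx2 helper added in this diff.
func createWidget(ctx context.Context, name string) (*Widget, error) {
	return db.WithTx2(ctx, func(ctx context.Context) (*Widget, error) {
		w := &Widget{Name: name}
		if err := db.Insert(ctx, w); err != nil {
			return nil, err
		}
		return w, nil
	})
}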
diff --git a/models/db/context_test.go b/models/db/context_test.go
index e8c6b74d93..a6bd11d2ae 100644
--- a/models/db/context_test.go
+++ b/models/db/context_test.go
@@ -118,7 +118,7 @@ func TestContextSafety(t *testing.T) {
})
return nil
})
- assert.EqualValues(t, testCount, actualCount)
+ assert.Equal(t, testCount, actualCount)
// deny the bad usages
assert.PanicsWithError(t, "using database context in an iterator would cause corrupted results", func() {
diff --git a/models/db/engine.go b/models/db/engine.go
index 91015f7038..ba287d58f0 100755
--- a/models/db/engine.go
+++ b/models/db/engine.go
@@ -127,7 +127,7 @@ func IsTableNotEmpty(beanOrTableName any) (bool, error) {
// DeleteAllRecords will delete all the records of this table
func DeleteAllRecords(tableName string) error {
- _, err := xormEngine.Exec(fmt.Sprintf("DELETE FROM %s", tableName))
+ _, err := xormEngine.Exec("DELETE FROM " + tableName)
return err
}
diff --git a/models/db/engine_init.go b/models/db/engine_init.go
index 7a071fa29b..bb02aff274 100644
--- a/models/db/engine_init.go
+++ b/models/db/engine_init.go
@@ -42,9 +42,10 @@ func newXORMEngine() (*xorm.Engine, error) {
if err != nil {
return nil, err
}
- if setting.Database.Type == "mysql" {
+ switch setting.Database.Type {
+ case "mysql":
engine.Dialect().SetParams(map[string]string{"rowFormat": "DYNAMIC"})
- } else if setting.Database.Type == "mssql" {
+ case "mssql":
engine.Dialect().SetParams(map[string]string{"DEFAULT_VARCHAR": "nvarchar"})
}
engine.SetSchema(setting.Database.Schema)
diff --git a/models/db/engine_test.go b/models/db/engine_test.go
index 10a1a33ff0..a236f83735 100644
--- a/models/db/engine_test.go
+++ b/models/db/engine_test.go
@@ -52,7 +52,7 @@ func TestDeleteOrphanedObjects(t *testing.T) {
countAfter, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{})
assert.NoError(t, err)
- assert.EqualValues(t, countBefore, countAfter)
+ assert.Equal(t, countBefore, countAfter)
}
func TestPrimaryKeys(t *testing.T) {
diff --git a/models/db/error.go b/models/db/error.go
index 665e970e17..d47c7adac4 100644
--- a/models/db/error.go
+++ b/models/db/error.go
@@ -65,7 +65,7 @@ func (err ErrNotExist) Error() string {
if err.ID != 0 {
return fmt.Sprintf("%s does not exist [id: %d]", name, err.ID)
}
- return fmt.Sprintf("%s does not exist", name)
+ return name + " does not exist"
}
// Unwrap unwraps this as a ErrNotExist err
diff --git a/models/db/list_test.go b/models/db/list_test.go
index 45194611f8..170473a968 100644
--- a/models/db/list_test.go
+++ b/models/db/list_test.go
@@ -47,6 +47,6 @@ func TestFind(t *testing.T) {
repoUnits, newCnt, err := db.FindAndCount[repo_model.RepoUnit](db.DefaultContext, opts)
assert.NoError(t, err)
- assert.EqualValues(t, cnt, newCnt)
+ assert.Equal(t, cnt, newCnt)
assert.Len(t, repoUnits, repoUnitCount)
}
diff --git a/models/db/name.go b/models/db/name.go
index 0e11c78372..48c7fdbce5 100644
--- a/models/db/name.go
+++ b/models/db/name.go
@@ -5,6 +5,7 @@ package db
import (
"fmt"
+ "slices"
"strings"
"unicode/utf8"
@@ -80,10 +81,8 @@ func IsUsableName(reservedNames, reservedPatterns []string, name string) error {
return util.NewInvalidArgumentErrorf("name is empty")
}
- for i := range reservedNames {
- if name == reservedNames[i] {
- return ErrNameReserved{name}
- }
+ if slices.Contains(reservedNames, name) {
+ return ErrNameReserved{name}
}
for _, pat := range reservedPatterns {
diff --git a/models/db/search.go b/models/db/search.go
index e0a1b6bde9..44d54f21fc 100644
--- a/models/db/search.go
+++ b/models/db/search.go
@@ -29,7 +29,3 @@ const (
// NoConditionID means a condition to filter the records which don't match any id.
// eg: "milestone_id=-1" means "find the items without any milestone".
const NoConditionID int64 = -1
-
-// NonExistingID means a condition to match no result (eg: a non-existing user)
-// It doesn't use -1 or -2 because they are used as builtin users.
-const NonExistingID int64 = -1000000
diff --git a/models/db/sql_postgres_with_schema.go b/models/db/sql_postgres_with_schema.go
index 64b61b2ef3..812fe4a6a6 100644
--- a/models/db/sql_postgres_with_schema.go
+++ b/models/db/sql_postgres_with_schema.go
@@ -39,7 +39,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) {
// golangci lint is incorrect here - there is no benefit to using driver.ExecerContext here
// and in any case pq does not implement it
- if execer, ok := conn.(driver.Execer); ok { //nolint:staticcheck
+ if execer, ok := conn.(driver.Execer); ok { //nolint:staticcheck // see above
_, err := execer.Exec(`SELECT set_config(
'search_path',
$1 || ',' || current_setting('search_path'),
@@ -64,7 +64,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) {
// driver.String.ConvertValue will never return err for string
// golangci lint is incorrect here - there is no benefit to using stmt.ExecWithContext here
- _, err = stmt.Exec([]driver.Value{schemaValue}) //nolint:staticcheck
+ _, err = stmt.Exec([]driver.Value{schemaValue}) //nolint:staticcheck // see above
if err != nil {
_ = conn.Close()
return nil, err
diff --git a/models/dbfs/dbfile.go b/models/dbfs/dbfile.go
index dd27b5c36b..eaf506fbe6 100644
--- a/models/dbfs/dbfile.go
+++ b/models/dbfs/dbfile.go
@@ -46,10 +46,7 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er
blobPos := int(offset % f.blockSize)
blobOffset := offset - int64(blobPos)
blobRemaining := int(f.blockSize) - blobPos
- needRead := len(p)
- if needRead > blobRemaining {
- needRead = blobRemaining
- }
+ needRead := min(len(p), blobRemaining)
if blobOffset+int64(blobPos)+int64(needRead) > fileMeta.FileSize {
needRead = int(fileMeta.FileSize - blobOffset - int64(blobPos))
}
@@ -66,14 +63,8 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er
blobData = nil
}
- canCopy := len(blobData) - blobPos
- if canCopy <= 0 {
- canCopy = 0
- }
- realRead := needRead
- if realRead > canCopy {
- realRead = canCopy
- }
+ canCopy := max(len(blobData)-blobPos, 0)
+ realRead := min(needRead, canCopy)
if realRead > 0 {
copy(p[:realRead], fileData.BlobData[blobPos:blobPos+realRead])
}
@@ -113,10 +104,7 @@ func (f *file) Write(p []byte) (n int, err error) {
blobPos := int(f.offset % f.blockSize)
blobOffset := f.offset - int64(blobPos)
blobRemaining := int(f.blockSize) - blobPos
- needWrite := len(p)
- if needWrite > blobRemaining {
- needWrite = blobRemaining
- }
+ needWrite := min(len(p), blobRemaining)
buf := make([]byte, f.blockSize)
readBytes, err := f.readAt(fileMeta, blobOffset, buf)
if err != nil && !errors.Is(err, io.EOF) {
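The clamping logic above now leans on Go's built-in generic min and max (available since Go 1.21), which behave exactly like the removed if-blocks. A standalone illustration:

package main

import "fmt"

func main() {
	p := make([]byte, 10)
	blobRemaining := 4
	// Equivalent to: needRead := len(p); if needRead > blobRemaining { needRead = blobRemaining }
	needRead := min(len(p), blobRemaining)
	fmt.Println(needRead) // 4

	blobData := make([]byte, 2)
	blobPos := 5
	// Equivalent to the removed clamp-to-zero branch for canCopy.
	canCopy := max(len(blobData)-blobPos, 0)
	fmt.Println(canCopy) // 0
}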
diff --git a/models/dbfs/dbfs_test.go b/models/dbfs/dbfs_test.go
index 96cb1014c7..0257d2bd15 100644
--- a/models/dbfs/dbfs_test.go
+++ b/models/dbfs/dbfs_test.go
@@ -31,15 +31,15 @@ func TestDbfsBasic(t *testing.T) {
n, err := f.Write([]byte("0123456789")) // blocks: 0123 4567 89
assert.NoError(t, err)
- assert.EqualValues(t, 10, n)
+ assert.Equal(t, 10, n)
_, err = f.Seek(0, io.SeekStart)
assert.NoError(t, err)
buf, err := io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, 10, n)
- assert.EqualValues(t, "0123456789", string(buf))
+ assert.Equal(t, 10, n)
+ assert.Equal(t, "0123456789", string(buf))
// write some new data
_, err = f.Seek(1, io.SeekStart)
@@ -50,14 +50,14 @@ func TestDbfsBasic(t *testing.T) {
// read from offset
buf, err = io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, "9", string(buf))
+ assert.Equal(t, "9", string(buf))
// read all
_, err = f.Seek(0, io.SeekStart)
assert.NoError(t, err)
buf, err = io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, "0bcdefghi9", string(buf))
+ assert.Equal(t, "0bcdefghi9", string(buf))
// write to new size
_, err = f.Seek(-1, io.SeekEnd)
@@ -68,7 +68,7 @@ func TestDbfsBasic(t *testing.T) {
assert.NoError(t, err)
buf, err = io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, "0bcdefghiJKLMNOP", string(buf))
+ assert.Equal(t, "0bcdefghiJKLMNOP", string(buf))
// write beyond EOF and fill with zero
_, err = f.Seek(5, io.SeekCurrent)
@@ -79,7 +79,7 @@ func TestDbfsBasic(t *testing.T) {
assert.NoError(t, err)
buf, err = io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, "0bcdefghiJKLMNOP\x00\x00\x00\x00\x00xyzu", string(buf))
+ assert.Equal(t, "0bcdefghiJKLMNOP\x00\x00\x00\x00\x00xyzu", string(buf))
// write to the block with zeros
_, err = f.Seek(-6, io.SeekCurrent)
@@ -90,7 +90,7 @@ func TestDbfsBasic(t *testing.T) {
assert.NoError(t, err)
buf, err = io.ReadAll(f)
assert.NoError(t, err)
- assert.EqualValues(t, "0bcdefghiJKLMNOP\x00\x00\x00ABCDzu", string(buf))
+ assert.Equal(t, "0bcdefghiJKLMNOP\x00\x00\x00ABCDzu", string(buf))
assert.NoError(t, f.Close())
@@ -117,7 +117,7 @@ func TestDbfsBasic(t *testing.T) {
assert.NoError(t, err)
stat, err := f.Stat()
assert.NoError(t, err)
- assert.EqualValues(t, "test.txt", stat.Name())
+ assert.Equal(t, "test.txt", stat.Name())
assert.EqualValues(t, 0, stat.Size())
_, err = f.Write([]byte("0123456789"))
assert.NoError(t, err)
@@ -144,7 +144,7 @@ func TestDbfsReadWrite(t *testing.T) {
line, err := f2r.ReadString('\n')
assert.NoError(t, err)
- assert.EqualValues(t, "line 1\n", line)
+ assert.Equal(t, "line 1\n", line)
_, err = f2r.ReadString('\n')
assert.ErrorIs(t, err, io.EOF)
@@ -153,7 +153,7 @@ func TestDbfsReadWrite(t *testing.T) {
line, err = f2r.ReadString('\n')
assert.NoError(t, err)
- assert.EqualValues(t, "line 2\n", line)
+ assert.Equal(t, "line 2\n", line)
_, err = f2r.ReadString('\n')
assert.ErrorIs(t, err, io.EOF)
}
@@ -186,5 +186,5 @@ func TestDbfsSeekWrite(t *testing.T) {
buf, err := io.ReadAll(fr)
assert.NoError(t, err)
- assert.EqualValues(t, "111333", string(buf))
+ assert.Equal(t, "111333", string(buf))
}
diff --git a/models/fixtures/action_artifact.yml b/models/fixtures/action_artifact.yml
index 485474108f..ee8ef0d5ce 100644
--- a/models/fixtures/action_artifact.yml
+++ b/models/fixtures/action_artifact.yml
@@ -11,6 +11,24 @@
content_encoding: ""
artifact_path: "abc.txt"
artifact_name: "artifact-download"
+ status: 2
+ created_unix: 1712338649
+ updated_unix: 1712338649
+ expired_unix: 1720114649
+
+-
+ id: 2
+ run_id: 791
+ runner_id: 1
+ repo_id: 4
+ owner_id: 1
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ storage_path: ""
+ file_size: 1024
+ file_compressed_size: 1024
+ content_encoding: "30/20/1712348022422036662.chunk"
+ artifact_path: "abc.txt"
+ artifact_name: "artifact-download-incomplete"
status: 1
created_unix: 1712338649
updated_unix: 1712338649
@@ -87,3 +105,39 @@
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775
+
+-
+ id: 24
+ run_id: 795
+ runner_id: 1
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ storage_path: "27/5/1730330775594233150.chunk"
+ file_size: 1024
+ file_compressed_size: 1024
+ content_encoding: "application/zip"
+ artifact_path: "artifact-795-1.zip"
+ artifact_name: "artifact-795-1"
+ status: 2
+ created_unix: 1730330775
+ updated_unix: 1730330775
+ expired_unix: 1738106775
+
+-
+ id: 25
+ run_id: 795
+ runner_id: 1
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ storage_path: "27/5/1730330775594233150.chunk"
+ file_size: 1024
+ file_compressed_size: 1024
+ content_encoding: "application/zip"
+ artifact_path: "artifact-795-2.zip"
+ artifact_name: "artifact-795-2"
+ status: 2
+ created_unix: 1730330775
+ updated_unix: 1730330775
+ expired_unix: 1738106775
diff --git a/models/fixtures/action_run.yml b/models/fixtures/action_run.yml
index 1db849352f..09dfa6cccb 100644
--- a/models/fixtures/action_run.yml
+++ b/models/fixtures/action_run.yml
@@ -9,6 +9,7 @@
ref: "refs/heads/master"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
+ trigger_event: "push"
is_fork_pull_request: 0
status: 1
started: 1683636528
@@ -28,6 +29,7 @@
ref: "refs/heads/master"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
+ trigger_event: "push"
is_fork_pull_request: 0
status: 1
started: 1683636528
@@ -47,8 +49,9 @@
ref: "refs/heads/master"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
+ trigger_event: "push"
is_fork_pull_request: 0
- status: 1
+ status: 6 # running
started: 1683636528
stopped: 1683636626
created: 1683636108
@@ -66,6 +69,47 @@
ref: "refs/heads/test"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
+ trigger_event: "push"
+ is_fork_pull_request: 0
+ status: 1
+ started: 1683636528
+ stopped: 1683636626
+ created: 1683636108
+ updated: 1683636626
+ need_approval: 0
+ approved_by: 0
+-
+ id: 802
+ title: "workflow run list"
+ repo_id: 5
+ owner_id: 3
+ workflow_id: "test.yaml"
+ index: 191
+ trigger_user_id: 1
+ ref: "refs/heads/test"
+ commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
+ event: "push"
+ trigger_event: "push"
+ is_fork_pull_request: 0
+ status: 1
+ started: 1683636528
+ stopped: 1683636626
+ created: 1683636108
+ updated: 1683636626
+ need_approval: 0
+ approved_by: 0
+-
+ id: 803
+ title: "workflow run list for user"
+ repo_id: 2
+ owner_id: 0
+ workflow_id: "test.yaml"
+ index: 192
+ trigger_user_id: 1
+ ref: "refs/heads/test"
+ commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
+ event: "push"
+ trigger_event: "push"
is_fork_pull_request: 0
status: 1
started: 1683636528
@@ -74,3 +118,24 @@
updated: 1683636626
need_approval: 0
approved_by: 0
+
+-
+ id: 795
+ title: "to be deleted (test)"
+ repo_id: 2
+ owner_id: 2
+ workflow_id: "test.yaml"
+ index: 191
+ trigger_user_id: 1
+ ref: "refs/heads/test"
+ commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
+ event: "push"
+ trigger_event: "push"
+ is_fork_pull_request: 0
+ status: 2
+ started: 1683636528
+ stopped: 1683636626
+ created: 1683636108
+ updated: 1683636626
+ need_approval: 0
+ approved_by: 0
diff --git a/models/fixtures/action_run_job.yml b/models/fixtures/action_run_job.yml
index 8837e6ec2d..6c06d94aa4 100644
--- a/models/fixtures/action_run_job.yml
+++ b/models/fixtures/action_run_job.yml
@@ -69,3 +69,63 @@
status: 5
started: 1683636528
stopped: 1683636626
+
+-
+ id: 198
+ run_id: 795
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ name: job_1
+ attempt: 1
+ job_id: job_1
+ task_id: 53
+ status: 1
+ started: 1683636528
+ stopped: 1683636626
+
+-
+ id: 199
+ run_id: 795
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ name: job_2
+ attempt: 1
+ job_id: job_2
+ task_id: 54
+ status: 2
+ started: 1683636528
+ stopped: 1683636626
+-
+ id: 203
+ run_id: 802
+ repo_id: 5
+ owner_id: 0
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ name: job2
+ attempt: 1
+ job_id: job2
+ needs: '["job1"]'
+ task_id: 51
+ status: 5
+ started: 1683636528
+ stopped: 1683636626
+-
+ id: 204
+ run_id: 803
+ repo_id: 2
+ owner_id: 0
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ name: job2
+ attempt: 1
+ job_id: job2
+ needs: '["job1"]'
+ task_id: 51
+ status: 5
+ started: 1683636528
+ stopped: 1683636626
diff --git a/models/fixtures/action_runner.yml b/models/fixtures/action_runner.yml
new file mode 100644
index 0000000000..ecb7214006
--- /dev/null
+++ b/models/fixtures/action_runner.yml
@@ -0,0 +1,51 @@
+-
+ id: 34346
+ name: runner_to_be_deleted-user
+ uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF18
+ token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF18
+ version: "1.0.0"
+ owner_id: 1
+ repo_id: 0
+ description: "This runner is going to be deleted"
+ agent_labels: '["runner_to_be_deleted","linux"]'
+-
+ id: 34347
+ name: runner_to_be_deleted-org
+ uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF19
+ token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF19
+ version: "1.0.0"
+ owner_id: 3
+ repo_id: 0
+ description: "This runner is going to be deleted"
+ agent_labels: '["runner_to_be_deleted","linux"]'
+-
+ id: 34348
+ name: runner_to_be_deleted-repo1
+ uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF20
+ token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF20
+ version: "1.0.0"
+ owner_id: 0
+ repo_id: 1
+ description: "This runner is going to be deleted"
+ agent_labels: '["runner_to_be_deleted","linux"]'
+-
+ id: 34349
+ name: runner_to_be_deleted
+ uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF17
+ token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF17
+ version: "1.0.0"
+ owner_id: 0
+ repo_id: 0
+ description: "This runner is going to be deleted"
+ agent_labels: '["runner_to_be_deleted","linux"]'
+-
+ id: 34350
+ name: runner_to_be_deleted-org-ephemeral
+ uuid: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20
+ token_hash: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20
+ ephemeral: true
+ version: "1.0.0"
+ owner_id: 3
+ repo_id: 0
+ description: "This runner is going to be deleted"
+ agent_labels: '["runner_to_be_deleted","linux"]'
diff --git a/models/fixtures/action_task.yml b/models/fixtures/action_task.yml
index 506a47d8a0..c79fb07050 100644
--- a/models/fixtures/action_task.yml
+++ b/models/fixtures/action_task.yml
@@ -117,3 +117,63 @@
log_length: 707
log_size: 90179
log_expired: 0
+-
+ id: 52
+ job_id: 196
+ attempt: 1
+ runner_id: 34350
+ status: 6 # running
+ started: 1683636528
+ stopped: 1683636626
+ repo_id: 4
+ owner_id: 1
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ token_hash: f8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784222
+ token_salt: ffffffffff
+ token_last_eight: ffffffff
+ log_filename: artifact-test2/2f/47.log
+ log_in_storage: 1
+ log_length: 707
+ log_size: 90179
+ log_expired: 0
+-
+ id: 53
+ job_id: 198
+ attempt: 1
+ runner_id: 1
+ status: 1
+ started: 1683636528
+ stopped: 1683636626
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784223
+ token_salt: ffffffffff
+ token_last_eight: ffffffff
+ log_filename: artifact-test2/2f/47.log
+ log_in_storage: 1
+ log_length: 0
+ log_size: 0
+ log_expired: 0
+-
+ id: 54
+ job_id: 199
+ attempt: 1
+ runner_id: 1
+ status: 2
+ started: 1683636528
+ stopped: 1683636626
+ repo_id: 2
+ owner_id: 2
+ commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+ is_fork_pull_request: 0
+ token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784224
+ token_salt: ffffffffff
+ token_last_eight: ffffffff
+ log_filename: artifact-test2/2f/47.log
+ log_in_storage: 1
+ log_length: 0
+ log_size: 0
+ log_expired: 0
diff --git a/models/fixtures/branch.yml b/models/fixtures/branch.yml
index 17b1869ab6..03e21d04b4 100644
--- a/models/fixtures/branch.yml
+++ b/models/fixtures/branch.yml
@@ -93,3 +93,123 @@
is_deleted: false
deleted_by_id: 0
deleted_unix: 0
+
+-
+ id: 16
+ repo_id: 16
+ name: 'master'
+ commit_id: '69554a64c1e6030f051e5c3f94bfbd773cd6a324'
+ commit_message: 'not signed commit'
+ commit_time: 1502042309
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 17
+ repo_id: 16
+ name: 'not-signed'
+ commit_id: '69554a64c1e6030f051e5c3f94bfbd773cd6a324'
+ commit_message: 'not signed commit'
+ commit_time: 1502042309
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 18
+ repo_id: 16
+ name: 'good-sign-not-yet-validated'
+ commit_id: '27566bd5738fc8b4e3fef3c5e72cce608537bd95'
+ commit_message: 'good signed commit (with not yet validated email)'
+ commit_time: 1502042234
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 19
+ repo_id: 16
+ name: 'good-sign'
+ commit_id: 'f27c2b2b03dcab38beaf89b0ab4ff61f6de63441'
+ commit_message: 'good signed commit'
+ commit_time: 1502042101
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 20
+ repo_id: 1
+ name: 'feature/1'
+ commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d'
+ commit_message: 'Initial commit'
+ commit_time: 1489950479
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 21
+ repo_id: 49
+ name: 'master'
+ commit_id: 'aacbdfe9e1c4b47f60abe81849045fa4e96f1d75'
+ commit_message: "Add 'test/test.txt'"
+ commit_time: 1572535577
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 22
+ repo_id: 1
+ name: 'develop'
+ commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d'
+ commit_message: "Initial commit"
+ commit_time: 1489927679
+ pusher_id: 1
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 23
+ repo_id: 3
+ name: 'master'
+ commit_id: '2a47ca4b614a9f5a43abbd5ad851a54a616ffee6'
+ commit_message: "init project"
+ commit_time: 1497448461
+ pusher_id: 1
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 24
+ repo_id: 3
+ name: 'test_branch'
+ commit_id: 'd22b4d4daa5be07329fcef6ed458f00cf3392da0'
+ commit_message: "test commit"
+ commit_time: 1602935385
+ pusher_id: 1
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
+
+-
+ id: 25
+ repo_id: 54
+ name: 'master'
+ commit_id: '73cf03db6ece34e12bf91e8853dc58f678f2f82d'
+ commit_message: 'Initial commit'
+ commit_time: 1671663402
+ pusher_id: 2
+ is_deleted: false
+ deleted_by_id: 0
+ deleted_unix: 0
diff --git a/models/fixtures/commit_status.yml b/models/fixtures/commit_status.yml
index 20d57975ef..87c652e53a 100644
--- a/models/fixtures/commit_status.yml
+++ b/models/fixtures/commit_status.yml
@@ -7,6 +7,7 @@
target_url: https://example.com/builds/
description: My awesome CI-service
context: ci/awesomeness
+ context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7
creator_id: 2
-
@@ -18,6 +19,7 @@
target_url: https://example.com/converage/
description: My awesome Coverage service
context: cov/awesomeness
+ context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe
creator_id: 2
-
@@ -29,6 +31,7 @@
target_url: https://example.com/converage/
description: My awesome Coverage service
context: cov/awesomeness
+ context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe
creator_id: 2
-
@@ -40,6 +43,7 @@
target_url: https://example.com/builds/
description: My awesome CI-service
context: ci/awesomeness
+ context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7
creator_id: 2
-
@@ -51,4 +55,5 @@
target_url: https://example.com/builds/
description: My awesome deploy service
context: deploy/awesomeness
+ context_hash: ae9547713a6665fc4261d0756904932085a41cf2
creator_id: 2
diff --git a/models/fixtures/email_address.yml b/models/fixtures/email_address.yml
index b2a0432635..0f6bd9ee6d 100644
--- a/models/fixtures/email_address.yml
+++ b/models/fixtures/email_address.yml
@@ -81,7 +81,7 @@
-
id: 11
uid: 4
- email: user4@example.com
+ email: User4@Example.Com
lower_email: user4@example.com
is_activated: true
is_primary: true
diff --git a/models/fixtures/hook_task.yml b/models/fixtures/hook_task.yml
index d573406b36..6023719b1e 100644
--- a/models/fixtures/hook_task.yml
+++ b/models/fixtures/hook_task.yml
@@ -18,7 +18,7 @@
id: 2
hook_id: 1
uuid: uuid2
- is_delivered: false
+ is_delivered: true
-
id: 3
diff --git a/models/fixtures/public_key.yml b/models/fixtures/public_key.yml
index ae620ee2d1..856b0e3fb2 100644
--- a/models/fixtures/public_key.yml
+++ b/models/fixtures/public_key.yml
@@ -9,3 +9,4 @@
created_unix: 1559593109
updated_unix: 1565224552
login_source_id: 0
+ verified: false
diff --git a/models/fixtures/repo_transfer.yml b/models/fixtures/repo_transfer.yml
index db92c95248..b12e6b207f 100644
--- a/models/fixtures/repo_transfer.yml
+++ b/models/fixtures/repo_transfer.yml
@@ -21,3 +21,11 @@
repo_id: 32
created_unix: 1553610671
updated_unix: 1553610671
+
+-
+ id: 4
+ doer_id: 3
+ recipient_id: 1
+ repo_id: 5
+ created_unix: 1553610671
+ updated_unix: 1553610671
diff --git a/models/fixtures/webhook.yml b/models/fixtures/webhook.yml
index ebc4062b60..ec282914b8 100644
--- a/models/fixtures/webhook.yml
+++ b/models/fixtures/webhook.yml
@@ -1,7 +1,7 @@
-
id: 1
repo_id: 1
- url: www.example.com/url1
+ url: https://www.example.com/url1
content_type: 1 # json
events: '{"push_only":true,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":false}}'
is_active: true
@@ -9,7 +9,7 @@
-
id: 2
repo_id: 1
- url: www.example.com/url2
+ url: https://www.example.com/url2
content_type: 1 # json
events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}'
is_active: false
@@ -18,7 +18,7 @@
id: 3
owner_id: 3
repo_id: 3
- url: www.example.com/url3
+ url: https://www.example.com/url3
content_type: 1 # json
events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}'
is_active: true
@@ -26,7 +26,7 @@
-
id: 4
repo_id: 2
- url: www.example.com/url4
+ url: https://www.example.com/url4
content_type: 1 # json
events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
is_active: true
@@ -35,7 +35,7 @@
id: 5
repo_id: 0
owner_id: 0
- url: www.example.com/url5
+ url: https://www.example.com/url5
content_type: 1 # json
events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
is_active: true
@@ -45,7 +45,7 @@
id: 6
repo_id: 0
owner_id: 0
- url: www.example.com/url6
+ url: https://www.example.com/url6
content_type: 1 # json
events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
is_active: true
diff --git a/models/git/branch.go b/models/git/branch.go
index d1caa35947..6021e1101f 100644
--- a/models/git/branch.go
+++ b/models/git/branch.go
@@ -173,6 +173,18 @@ func GetBranch(ctx context.Context, repoID int64, branchName string) (*Branch, e
return &branch, nil
}
+// IsBranchExist returns true if the branch exists in the repository.
+func IsBranchExist(ctx context.Context, repoID int64, branchName string) (bool, error) {
+ var branch Branch
+ has, err := db.GetEngine(ctx).Where("repo_id=?", repoID).And("name=?", branchName).Get(&branch)
+ if err != nil {
+ return false, err
+ } else if !has {
+ return false, nil
+ }
+ return !branch.IsDeleted, nil
+}
+
func GetBranches(ctx context.Context, repoID int64, branchNames []string, includeDeleted bool) ([]*Branch, error) {
branches := make([]*Branch, 0, len(branchNames))
@@ -223,6 +235,11 @@ func GetDeletedBranchByID(ctx context.Context, repoID, branchID int64) (*Branch,
return &branch, nil
}
+func DeleteRepoBranches(ctx context.Context, repoID int64) error {
+ _, err := db.GetEngine(ctx).Where("repo_id=?", repoID).Delete(new(Branch))
+ return err
+}
+
func DeleteBranches(ctx context.Context, repoID, doerID int64, branchIDs []int64) error {
return db.WithTx(ctx, func(ctx context.Context) error {
branches := make([]*Branch, 0, len(branchIDs))
@@ -455,7 +472,7 @@ type RecentlyPushedNewBranch struct {
// if opts.CommitAfterUnix is 0, we will find the branches that were committed to in the last 2 hours
// if opts.ListOptions is not set, we will only display top 2 latest branches.
// Protected branches will be skipped since they are unlikely to be used to create new PRs.
-func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, opts *FindRecentlyPushedNewBranchesOptions) ([]*RecentlyPushedNewBranch, error) {
+func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, opts FindRecentlyPushedNewBranchesOptions) ([]*RecentlyPushedNewBranch, error) {
if doer == nil {
return []*RecentlyPushedNewBranch{}, nil
}
@@ -470,7 +487,7 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
ForkFrom: opts.BaseRepo.ID,
Archived: optional.Some(false),
}
- repoCond := repo_model.SearchRepositoryCondition(&repoOpts).And(repo_model.AccessibleRepositoryCondition(doer, unit.TypeCode))
+ repoCond := repo_model.SearchRepositoryCondition(repoOpts).And(repo_model.AccessibleRepositoryCondition(doer, unit.TypeCode))
if opts.Repo.ID == opts.BaseRepo.ID {
// should also include the base repo's branches
repoCond = repoCond.Or(builder.Eq{"id": opts.BaseRepo.ID})
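A usage sketch for the two helpers added above; the wrapper names are illustrative. Note that IsBranchExist treats a soft-deleted branch row as not existing:

package example

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
)

// hasMainBranch reports whether a live (not soft-deleted) "main" branch row exists.
func hasMainBranch(ctx context.Context, repoID int64) (bool, error) {
	return git_model.IsBranchExist(ctx, repoID, "main")
}

// purgeBranches removes every branch row of a repository, e.g. while deleting the repo.
func purgeBranches(ctx context.Context, repoID int64) error {
	return git_model.DeleteRepoBranches(ctx, repoID)
}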
diff --git a/models/git/branch_test.go b/models/git/branch_test.go
index b8ea663e81..252dcc5690 100644
--- a/models/git/branch_test.go
+++ b/models/git/branch_test.go
@@ -21,7 +21,7 @@ import (
func TestAddDeletedBranch(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
- assert.EqualValues(t, git.Sha1ObjectFormat.Name(), repo.ObjectFormatName)
+ assert.Equal(t, git.Sha1ObjectFormat.Name(), repo.ObjectFormatName)
firstBranch := unittest.AssertExistsAndLoadBean(t, &git_model.Branch{ID: 1})
assert.True(t, firstBranch.IsDeleted)
diff --git a/models/git/commit_status.go b/models/git/commit_status.go
index 5432eea55e..e255bca5d0 100644
--- a/models/git/commit_status.go
+++ b/models/git/commit_status.go
@@ -17,10 +17,10 @@ import (
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/commitstatus"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
- api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/translation"
@@ -30,17 +30,17 @@ import (
// CommitStatus holds a single Status of a single Commit
type CommitStatus struct {
- ID int64 `xorm:"pk autoincr"`
- Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
- RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
- Repo *repo_model.Repository `xorm:"-"`
- State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
- SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
- TargetURL string `xorm:"TEXT"`
- Description string `xorm:"TEXT"`
- ContextHash string `xorm:"VARCHAR(64) index"`
- Context string `xorm:"TEXT"`
- Creator *user_model.User `xorm:"-"`
+ ID int64 `xorm:"pk autoincr"`
+ Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
+ RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"`
+ Repo *repo_model.Repository `xorm:"-"`
+ State commitstatus.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
+ SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"`
+ TargetURL string `xorm:"TEXT"`
+ Description string `xorm:"TEXT"`
+ ContextHash string `xorm:"VARCHAR(64) index"`
+ Context string `xorm:"TEXT"`
+ Creator *user_model.User `xorm:"-"`
CreatorID int64
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
@@ -222,7 +222,7 @@ func (status *CommitStatus) HideActionsURL(ctx context.Context) {
}
}
- prefix := fmt.Sprintf("%s/actions", status.Repo.Link())
+ prefix := status.Repo.Link() + "/actions"
if strings.HasPrefix(status.TargetURL, prefix) {
status.TargetURL = ""
}
@@ -230,22 +230,25 @@ func (status *CommitStatus) HideActionsURL(ctx context.Context) {
// CalcCommitStatus returns the combined commit status state from the given statuses; the statuses should be ordered by id desc
func CalcCommitStatus(statuses []*CommitStatus) *CommitStatus {
- var lastStatus *CommitStatus
- state := api.CommitStatusSuccess
+ if len(statuses) == 0 {
+ return nil
+ }
+
+ states := make(commitstatus.CommitStatusStates, 0, len(statuses))
+ targetURL := ""
for _, status := range statuses {
- if status.State.NoBetterThan(state) {
- state = status.State
- lastStatus = status
+ states = append(states, status.State)
+ if status.TargetURL != "" {
+ targetURL = status.TargetURL
}
}
- if lastStatus == nil {
- if len(statuses) > 0 {
- lastStatus = statuses[0]
- } else {
- lastStatus = &CommitStatus{}
- }
+
+ return &CommitStatus{
+ RepoID: statuses[0].RepoID,
+ SHA: statuses[0].SHA,
+ State: states.Combine(),
+ TargetURL: targetURL,
}
- return lastStatus
}
// CommitStatusOptions holds the options for query commit statuses
@@ -298,27 +301,37 @@ type CommitStatusIndex struct {
MaxIndex int64 `xorm:"index"`
}
+func makeRepoCommitQuery(ctx context.Context, repoID int64, sha string) *xorm.Session {
+ return db.GetEngine(ctx).Table(&CommitStatus{}).
+ Where("repo_id = ?", repoID).And("sha = ?", sha)
+}
+
// GetLatestCommitStatus returns all statuses with a unique context for a given commit.
-func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) {
- getBase := func() *xorm.Session {
- return db.GetEngine(ctx).Table(&CommitStatus{}).
- Where("repo_id = ?", repoID).And("sha = ?", sha)
- }
+func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, error) {
indices := make([]int64, 0, 10)
- sess := getBase().Select("max( `index` ) as `index`").
- GroupBy("context_hash").OrderBy("max( `index` ) desc")
+ sess := makeRepoCommitQuery(ctx, repoID, sha).
+ Select("max( `index` ) as `index`").
+ GroupBy("context_hash").
+ OrderBy("max( `index` ) desc")
if !listOptions.IsListAll() {
sess = db.SetSessionPagination(sess, &listOptions)
}
- count, err := sess.FindAndCount(&indices)
- if err != nil {
- return nil, count, err
+ if err := sess.Find(&indices); err != nil {
+ return nil, err
}
statuses := make([]*CommitStatus, 0, len(indices))
if len(indices) == 0 {
- return statuses, count, nil
+ return statuses, nil
}
- return statuses, count, getBase().And(builder.In("`index`", indices)).Find(&statuses)
+ err := makeRepoCommitQuery(ctx, repoID, sha).And(builder.In("`index`", indices)).Find(&statuses)
+ return statuses, err
+}
+
+func CountLatestCommitStatus(ctx context.Context, repoID int64, sha string) (int64, error) {
+ return makeRepoCommitQuery(ctx, repoID, sha).
+ Select("count(context_hash)").
+ GroupBy("context_hash").
+ Count()
}
// GetLatestCommitStatusForPairs returns all statuses with a unique context for a given list of repo-sha pairs
@@ -453,40 +466,35 @@ func NewCommitStatus(ctx context.Context, opts NewCommitStatusOptions) error {
return fmt.Errorf("NewCommitStatus[nil, %s]: no repository specified", opts.SHA)
}
- repoPath := opts.Repo.RepoPath()
if opts.Creator == nil {
- return fmt.Errorf("NewCommitStatus[%s, %s]: no user specified", repoPath, opts.SHA)
+ return fmt.Errorf("NewCommitStatus[%s, %s]: no user specified", opts.Repo.FullName(), opts.SHA)
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return fmt.Errorf("NewCommitStatus[repo_id: %d, user_id: %d, sha: %s]: %w", opts.Repo.ID, opts.Creator.ID, opts.SHA, err)
- }
- defer committer.Close()
-
- // Get the next Status Index
- idx, err := GetNextCommitStatusIndex(ctx, opts.Repo.ID, opts.SHA.String())
- if err != nil {
- return fmt.Errorf("generate commit status index failed: %w", err)
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // Get the next Status Index
+ idx, err := GetNextCommitStatusIndex(ctx, opts.Repo.ID, opts.SHA.String())
+ if err != nil {
+ return fmt.Errorf("generate commit status index failed: %w", err)
+ }
- opts.CommitStatus.Description = strings.TrimSpace(opts.CommitStatus.Description)
- opts.CommitStatus.Context = strings.TrimSpace(opts.CommitStatus.Context)
- opts.CommitStatus.TargetURL = strings.TrimSpace(opts.CommitStatus.TargetURL)
- opts.CommitStatus.SHA = opts.SHA.String()
- opts.CommitStatus.CreatorID = opts.Creator.ID
- opts.CommitStatus.RepoID = opts.Repo.ID
- opts.CommitStatus.Index = idx
- log.Debug("NewCommitStatus[%s, %s]: %d", repoPath, opts.SHA, opts.CommitStatus.Index)
+ opts.CommitStatus.Description = strings.TrimSpace(opts.CommitStatus.Description)
+ opts.CommitStatus.Context = strings.TrimSpace(opts.CommitStatus.Context)
+ opts.CommitStatus.TargetURL = strings.TrimSpace(opts.CommitStatus.TargetURL)
+ opts.CommitStatus.SHA = opts.SHA.String()
+ opts.CommitStatus.CreatorID = opts.Creator.ID
+ opts.CommitStatus.RepoID = opts.Repo.ID
+ opts.CommitStatus.Index = idx
+ log.Debug("NewCommitStatus[%s, %s]: %d", opts.Repo.FullName(), opts.SHA, opts.CommitStatus.Index)
- opts.CommitStatus.ContextHash = hashCommitStatusContext(opts.CommitStatus.Context)
+ opts.CommitStatus.ContextHash = hashCommitStatusContext(opts.CommitStatus.Context)
- // Insert new CommitStatus
- if _, err = db.GetEngine(ctx).Insert(opts.CommitStatus); err != nil {
- return fmt.Errorf("insert CommitStatus[%s, %s]: %w", repoPath, opts.SHA, err)
- }
+ // Insert new CommitStatus
+ if err = db.Insert(ctx, opts.CommitStatus); err != nil {
+ return fmt.Errorf("insert CommitStatus[%s, %s]: %w", opts.Repo.FullName(), opts.SHA, err)
+ }
- return committer.Commit()
+ return nil
+ })
}
// SignCommitWithStatuses represents a commit with validation of signature and status state.
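Since the count no longer comes back from GetLatestCommitStatus, callers that need both the page and the total now make two calls; a minimal sketch (the wrapper function is illustrative):

package example

import (
	"context"

	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
)

// latestStatusesWithTotal fetches one page of latest-per-context statuses and
// the total number of distinct contexts for the commit.
func latestStatusesWithTotal(ctx context.Context, repoID int64, sha string) ([]*git_model.CommitStatus, int64, error) {
	statuses, err := git_model.GetLatestCommitStatus(ctx, repoID, sha, db.ListOptions{Page: 1, PageSize: 20})
	if err != nil {
		return nil, 0, err
	}
	total, err := git_model.CountLatestCommitStatus(ctx, repoID, sha)
	if err != nil {
		return nil, 0, err
	}
	return statuses, total, nil
}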
diff --git a/models/git/commit_status_summary.go b/models/git/commit_status_summary.go
index 7603e7aa65..dd416fa015 100644
--- a/models/git/commit_status_summary.go
+++ b/models/git/commit_status_summary.go
@@ -7,19 +7,19 @@ import (
"context"
"code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/commitstatus"
"code.gitea.io/gitea/modules/setting"
- api "code.gitea.io/gitea/modules/structs"
"xorm.io/builder"
)
// CommitStatusSummary holds the latest commit Status of a single Commit
type CommitStatusSummary struct {
- ID int64 `xorm:"pk autoincr"`
- RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"`
- SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"`
- State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
- TargetURL string `xorm:"TEXT"`
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"`
+ SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"`
+ State commitstatus.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
+ TargetURL string `xorm:"TEXT"`
}
func init() {
@@ -55,11 +55,15 @@ func GetLatestCommitStatusForRepoAndSHAs(ctx context.Context, repoSHAs []RepoSHA
}
func UpdateCommitStatusSummary(ctx context.Context, repoID int64, sha string) error {
- commitStatuses, _, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll)
+ commitStatuses, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll)
if err != nil {
return err
}
- state := CalcCommitStatus(commitStatuses)
+	// the caller guarantees that commitStatuses is not empty because this function is always called after a commit status is created
+ if len(commitStatuses) == 0 {
+ setting.PanicInDevOrTesting("no commit statuses found for repo %d and sha %s", repoID, sha)
+ }
+ state := CalcCommitStatus(commitStatuses) // non-empty commitStatuses is guaranteed
	// MySQL will return 0 when updating a record whose state hasn't been changed, which differs from other databases,
	// so we need to use INSERT ... ON DUPLICATE KEY UPDATE
if setting.Database.Type.IsMySQL() {
diff --git a/models/git/commit_status_test.go b/models/git/commit_status_test.go
index 37d785e938..4c0f5e891b 100644
--- a/models/git/commit_status_test.go
+++ b/models/git/commit_status_test.go
@@ -14,9 +14,9 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/commitstatus"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
- "code.gitea.io/gitea/modules/structs"
"github.com/stretchr/testify/assert"
)
@@ -26,7 +26,7 @@ func TestGetCommitStatuses(t *testing.T) {
repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
- sha1 := "1234123412341234123412341234123412341234"
+ sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures
statuses, maxResults, err := db.FindAndCount[git_model.CommitStatus](db.DefaultContext, &git_model.CommitStatusOptions{
ListOptions: db.ListOptions{Page: 1, PageSize: 50},
@@ -38,23 +38,23 @@ func TestGetCommitStatuses(t *testing.T) {
assert.Len(t, statuses, 5)
assert.Equal(t, "ci/awesomeness", statuses[0].Context)
- assert.Equal(t, structs.CommitStatusPending, statuses[0].State)
+ assert.Equal(t, commitstatus.CommitStatusPending, statuses[0].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[0].APIURL(db.DefaultContext))
assert.Equal(t, "cov/awesomeness", statuses[1].Context)
- assert.Equal(t, structs.CommitStatusWarning, statuses[1].State)
+ assert.Equal(t, commitstatus.CommitStatusWarning, statuses[1].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[1].APIURL(db.DefaultContext))
assert.Equal(t, "cov/awesomeness", statuses[2].Context)
- assert.Equal(t, structs.CommitStatusSuccess, statuses[2].State)
+ assert.Equal(t, commitstatus.CommitStatusSuccess, statuses[2].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[2].APIURL(db.DefaultContext))
assert.Equal(t, "ci/awesomeness", statuses[3].Context)
- assert.Equal(t, structs.CommitStatusFailure, statuses[3].State)
+ assert.Equal(t, commitstatus.CommitStatusFailure, statuses[3].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[3].APIURL(db.DefaultContext))
assert.Equal(t, "deploy/awesomeness", statuses[4].Context)
- assert.Equal(t, structs.CommitStatusError, statuses[4].State)
+ assert.Equal(t, commitstatus.CommitStatusError, statuses[4].State)
assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[4].APIURL(db.DefaultContext))
statuses, maxResults, err = db.FindAndCount[git_model.CommitStatus](db.DefaultContext, &git_model.CommitStatusOptions{
@@ -75,110 +75,110 @@ func Test_CalcCommitStatus(t *testing.T) {
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusError,
+ State: commitstatus.CommitStatusError,
},
{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusError,
+ State: commitstatus.CommitStatusFailure,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusWarning,
+ State: commitstatus.CommitStatusWarning,
},
{
- State: structs.CommitStatusPending,
+ State: commitstatus.CommitStatusPending,
},
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusWarning,
+ State: commitstatus.CommitStatusPending,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
},
},
{
statuses: []*git_model.CommitStatus{
{
- State: structs.CommitStatusFailure,
+ State: commitstatus.CommitStatusFailure,
},
{
- State: structs.CommitStatusError,
+ State: commitstatus.CommitStatusError,
},
{
- State: structs.CommitStatusWarning,
+ State: commitstatus.CommitStatusWarning,
},
},
expected: &git_model.CommitStatus{
- State: structs.CommitStatusError,
+ State: commitstatus.CommitStatusFailure,
},
},
}
for _, kase := range kases {
- assert.Equal(t, kase.expected, git_model.CalcCommitStatus(kase.statuses))
+ assert.Equal(t, kase.expected, git_model.CalcCommitStatus(kase.statuses), "statuses: %v", kase.statuses)
}
}
@@ -208,7 +208,7 @@ func TestFindRepoRecentCommitStatusContexts(t *testing.T) {
Creator: user2,
SHA: commit.ID,
CommitStatus: &git_model.CommitStatus{
- State: structs.CommitStatusFailure,
+ State: commitstatus.CommitStatusFailure,
TargetURL: "https://example.com/tests/",
Context: "compliance/lint-backend",
},
@@ -220,7 +220,7 @@ func TestFindRepoRecentCommitStatusContexts(t *testing.T) {
Creator: user2,
SHA: commit.ID,
CommitStatus: &git_model.CommitStatus{
- State: structs.CommitStatusSuccess,
+ State: commitstatus.CommitStatusSuccess,
TargetURL: "https://example.com/tests/",
Context: "compliance/lint-backend",
},
@@ -256,3 +256,26 @@ func TestCommitStatusesHideActionsURL(t *testing.T) {
assert.Empty(t, statuses[0].TargetURL)
assert.Equal(t, "https://mycicd.org/1", statuses[1].TargetURL)
}
+
+func TestGetCountLatestCommitStatus(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures
+
+ commitStatuses, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo1.ID, sha1, db.ListOptions{
+ Page: 1,
+ PageSize: 2,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, commitStatuses, 2)
+ assert.Equal(t, commitstatus.CommitStatusFailure, commitStatuses[0].State)
+ assert.Equal(t, "ci/awesomeness", commitStatuses[0].Context)
+ assert.Equal(t, commitstatus.CommitStatusError, commitStatuses[1].State)
+ assert.Equal(t, "deploy/awesomeness", commitStatuses[1].Context)
+
+ count, err := git_model.CountLatestCommitStatus(db.DefaultContext, repo1.ID, sha1)
+ assert.NoError(t, err)
+ assert.EqualValues(t, 3, count)
+}
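The changed expectations in Test_CalcCommitStatus (error-containing sets now combine to failure, pending/warning-containing sets to pending) suggest the following combination rule. This sketch is inferred from the test table above, not taken from the commitstatus package itself:

package example

import "code.gitea.io/gitea/modules/commitstatus"

// combineLikeTest mirrors what the updated expectations imply: any failure or
// error wins as failure, otherwise any pending or warning wins as pending,
// otherwise everything succeeded. Inferred behaviour only.
func combineLikeTest(states []commitstatus.CommitStatusState) commitstatus.CommitStatusState {
	hasFailure, hasPendingOrWarning := false, false
	for _, s := range states {
		switch s {
		case commitstatus.CommitStatusError, commitstatus.CommitStatusFailure:
			hasFailure = true
		case commitstatus.CommitStatusPending, commitstatus.CommitStatusWarning:
			hasPendingOrWarning = true
		}
	}
	switch {
	case hasFailure:
		return commitstatus.CommitStatusFailure
	case hasPendingOrWarning:
		return commitstatus.CommitStatusPending
	default:
		return commitstatus.CommitStatusSuccess
	}
}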
diff --git a/models/git/lfs.go b/models/git/lfs.go
index bb6361050a..c471baf588 100644
--- a/models/git/lfs.go
+++ b/models/git/lfs.go
@@ -112,7 +112,6 @@ type LFSMetaObject struct {
ID int64 `xorm:"pk autoincr"`
lfs.Pointer `xorm:"extends"`
RepositoryID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"`
- Existing bool `xorm:"-"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}
@@ -136,26 +135,18 @@ var ErrLFSObjectNotExist = db.ErrNotExist{Resource: "LFS Meta object"}
// NewLFSMetaObject stores a given populated LFSMetaObject structure in the database
// if it is not already present.
func NewLFSMetaObject(ctx context.Context, repoID int64, p lfs.Pointer) (*LFSMetaObject, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
m, exist, err := db.Get[LFSMetaObject](ctx, builder.Eq{"repository_id": repoID, "oid": p.Oid})
if err != nil {
return nil, err
} else if exist {
- m.Existing = true
- return m, committer.Commit()
+ return m, nil
}
m = &LFSMetaObject{Pointer: p, RepositoryID: repoID}
if err = db.Insert(ctx, m); err != nil {
return nil, err
}
-
- return m, committer.Commit()
+ return m, nil
}
// GetLFSMetaObjectByOid selects a LFSMetaObject entry from database by its OID.
diff --git a/models/git/lfs_lock.go b/models/git/lfs_lock.go
index 07ce7d4abf..c5f9a4e6de 100644
--- a/models/git/lfs_lock.go
+++ b/models/git/lfs_lock.go
@@ -70,32 +70,28 @@ func (l *LFSLock) LoadOwner(ctx context.Context) error {
// CreateLFSLock creates a new lock.
func CreateLFSLock(ctx context.Context, repo *repo_model.Repository, lock *LFSLock) (*LFSLock, error) {
- dbCtx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- if err := CheckLFSAccessForRepo(dbCtx, lock.OwnerID, repo, perm.AccessModeWrite); err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*LFSLock, error) {
+ if err := CheckLFSAccessForRepo(ctx, lock.OwnerID, repo, perm.AccessModeWrite); err != nil {
+ return nil, err
+ }
- lock.Path = util.PathJoinRel(lock.Path)
- lock.RepoID = repo.ID
+ lock.Path = util.PathJoinRel(lock.Path)
+ lock.RepoID = repo.ID
- l, err := GetLFSLock(dbCtx, repo, lock.Path)
- if err == nil {
- return l, ErrLFSLockAlreadyExist{lock.RepoID, lock.Path}
- }
- if !IsErrLFSLockNotExist(err) {
- return nil, err
- }
+ l, err := GetLFSLock(ctx, repo, lock.Path)
+ if err == nil {
+ return l, ErrLFSLockAlreadyExist{lock.RepoID, lock.Path}
+ }
+ if !IsErrLFSLockNotExist(err) {
+ return nil, err
+ }
- if err := db.Insert(dbCtx, lock); err != nil {
- return nil, err
- }
+ if err := db.Insert(ctx, lock); err != nil {
+ return nil, err
+ }
- return lock, committer.Commit()
+ return lock, nil
+ })
}
// GetLFSLock returns the lock of the given path.
@@ -163,30 +159,26 @@ func CountLFSLockByRepoID(ctx context.Context, repoID int64) (int64, error) {
// DeleteLFSLockByID deletes a lock by given ID.
func DeleteLFSLockByID(ctx context.Context, id int64, repo *repo_model.Repository, u *user_model.User, force bool) (*LFSLock, error) {
- dbCtx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- lock, err := GetLFSLockByID(dbCtx, id)
- if err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*LFSLock, error) {
+ lock, err := GetLFSLockByID(ctx, id)
+ if err != nil {
+ return nil, err
+ }
- if err := CheckLFSAccessForRepo(dbCtx, u.ID, repo, perm.AccessModeWrite); err != nil {
- return nil, err
- }
+ if err := CheckLFSAccessForRepo(ctx, u.ID, repo, perm.AccessModeWrite); err != nil {
+ return nil, err
+ }
- if !force && u.ID != lock.OwnerID {
- return nil, errors.New("user doesn't own lock and force flag is not set")
- }
+ if !force && u.ID != lock.OwnerID {
+ return nil, errors.New("user doesn't own lock and force flag is not set")
+ }
- if _, err := db.GetEngine(dbCtx).ID(id).Delete(new(LFSLock)); err != nil {
- return nil, err
- }
+ if _, err := db.GetEngine(ctx).ID(id).Delete(new(LFSLock)); err != nil {
+ return nil, err
+ }
- return lock, committer.Commit()
+ return lock, nil
+ })
}
// CheckLFSAccessForRepo checks the needed access mode based on the action
diff --git a/models/git/protected_branch.go b/models/git/protected_branch.go
index a3caed73c4..55bbe6938c 100644
--- a/models/git/protected_branch.go
+++ b/models/git/protected_branch.go
@@ -246,7 +246,7 @@ func (protectBranch *ProtectedBranch) GetUnprotectedFilePatterns() []glob.Glob {
func getFilePatterns(filePatterns string) []glob.Glob {
extarr := make([]glob.Glob, 0, 10)
- for _, expr := range strings.Split(strings.ToLower(filePatterns), ";") {
+ for expr := range strings.SplitSeq(strings.ToLower(filePatterns), ";") {
expr = strings.TrimSpace(expr)
if expr != "" {
if g, err := glob.Compile(expr, '.', '/'); err != nil {
@@ -518,7 +518,7 @@ func updateTeamWhitelist(ctx context.Context, repo *repo_model.Repository, curre
return currentWhitelist, nil
}
- teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead)
+ teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests)
if err != nil {
return nil, fmt.Errorf("GetTeamsWithAccessToRepo [org_id: %d, repo_id: %d]: %v", repo.OwnerID, repo.ID, err)
}
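getFilePatterns now ranges directly over strings.SplitSeq (Go 1.24), which yields the segments as an iterator instead of allocating a slice; the skip-empty behaviour is unchanged. A standalone illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	patterns := "docs/*;  ;*.MD"
	for expr := range strings.SplitSeq(strings.ToLower(patterns), ";") {
		expr = strings.TrimSpace(expr)
		if expr == "" {
			continue // empty segments are skipped, as in getFilePatterns
		}
		fmt.Println(expr) // prints "docs/*" and then "*.md"
	}
}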
diff --git a/models/git/protected_branch_list_test.go b/models/git/protected_branch_list_test.go
index a46402c543..298c2fa074 100644
--- a/models/git/protected_branch_list_test.go
+++ b/models/git/protected_branch_list_test.go
@@ -70,7 +70,7 @@ func TestBranchRuleMatchPriority(t *testing.T) {
assert.Error(t, fmt.Errorf("no matched rules but expected %s[%d]", kase.Rules[kase.ExpectedMatchIdx], kase.ExpectedMatchIdx))
}
} else {
- assert.EqualValues(t, kase.Rules[kase.ExpectedMatchIdx], matchedPB.RuleName)
+ assert.Equal(t, kase.Rules[kase.ExpectedMatchIdx], matchedPB.RuleName)
}
}
}
diff --git a/models/git/protected_branch_test.go b/models/git/protected_branch_test.go
index e1c91d927d..367992081d 100644
--- a/models/git/protected_branch_test.go
+++ b/models/git/protected_branch_test.go
@@ -74,7 +74,7 @@ func TestBranchRuleMatch(t *testing.T) {
} else {
infact = " not"
}
- assert.EqualValues(t, kase.ExpectedMatch, pb.Match(kase.BranchName),
+ assert.Equal(t, kase.ExpectedMatch, pb.Match(kase.BranchName),
"%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact,
)
}
diff --git a/models/issues/comment.go b/models/issues/comment.go
index ab9b2042f3..d22f08fa87 100644
--- a/models/issues/comment.go
+++ b/models/issues/comment.go
@@ -9,6 +9,7 @@ import (
"context"
"fmt"
"html/template"
+ "slices"
"strconv"
"unicode/utf8"
@@ -196,12 +197,7 @@ func (t CommentType) HasMailReplySupport() bool {
}
func (t CommentType) CountedAsConversation() bool {
- for _, ct := range ConversationCountedCommentType() {
- if t == ct {
- return true
- }
- }
- return false
+ return slices.Contains(ConversationCountedCommentType(), t)
}
// ConversationCountedCommentType returns the comment types that are counted as a conversation
@@ -614,7 +610,7 @@ func UpdateCommentAttachments(ctx context.Context, c *Comment, uuids []string) e
if err != nil {
return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
}
- for i := 0; i < len(attachments); i++ {
+ for i := range attachments {
attachments[i].IssueID = c.IssueID
attachments[i].CommentID = c.ID
if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
@@ -719,7 +715,8 @@ func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository
return nil
}
-func (c *Comment) loadReview(ctx context.Context) (err error) {
+// LoadReview loads the associated review
+func (c *Comment) LoadReview(ctx context.Context) (err error) {
if c.ReviewID == 0 {
return nil
}
@@ -736,11 +733,6 @@ func (c *Comment) loadReview(ctx context.Context) (err error) {
return nil
}
-// LoadReview loads the associated review
-func (c *Comment) LoadReview(ctx context.Context) error {
- return c.loadReview(ctx)
-}
-
// DiffSide returns "previous" if Comment.Line is a LOC of the previous changes and "proposed" if it is a LOC of the proposed changes.
func (c *Comment) DiffSide() string {
if c.Line < 0 {
@@ -774,81 +766,73 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string {
// CreateComment creates comment with context
func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- e := db.GetEngine(ctx)
- var LabelID int64
- if opts.Label != nil {
- LabelID = opts.Label.ID
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ var LabelID int64
+ if opts.Label != nil {
+ LabelID = opts.Label.ID
+ }
- var commentMetaData *CommentMetaData
- if opts.ProjectColumnTitle != "" {
- commentMetaData = &CommentMetaData{
- ProjectColumnID: opts.ProjectColumnID,
- ProjectColumnTitle: opts.ProjectColumnTitle,
- ProjectTitle: opts.ProjectTitle,
+ var commentMetaData *CommentMetaData
+ if opts.ProjectColumnTitle != "" {
+ commentMetaData = &CommentMetaData{
+ ProjectColumnID: opts.ProjectColumnID,
+ ProjectColumnTitle: opts.ProjectColumnTitle,
+ ProjectTitle: opts.ProjectTitle,
+ }
}
- }
- comment := &Comment{
- Type: opts.Type,
- PosterID: opts.Doer.ID,
- Poster: opts.Doer,
- IssueID: opts.Issue.ID,
- LabelID: LabelID,
- OldMilestoneID: opts.OldMilestoneID,
- MilestoneID: opts.MilestoneID,
- OldProjectID: opts.OldProjectID,
- ProjectID: opts.ProjectID,
- TimeID: opts.TimeID,
- RemovedAssignee: opts.RemovedAssignee,
- AssigneeID: opts.AssigneeID,
- AssigneeTeamID: opts.AssigneeTeamID,
- CommitID: opts.CommitID,
- CommitSHA: opts.CommitSHA,
- Line: opts.LineNum,
- Content: opts.Content,
- OldTitle: opts.OldTitle,
- NewTitle: opts.NewTitle,
- OldRef: opts.OldRef,
- NewRef: opts.NewRef,
- DependentIssueID: opts.DependentIssueID,
- TreePath: opts.TreePath,
- ReviewID: opts.ReviewID,
- Patch: opts.Patch,
- RefRepoID: opts.RefRepoID,
- RefIssueID: opts.RefIssueID,
- RefCommentID: opts.RefCommentID,
- RefAction: opts.RefAction,
- RefIsPull: opts.RefIsPull,
- IsForcePush: opts.IsForcePush,
- Invalidated: opts.Invalidated,
- CommentMetaData: commentMetaData,
- }
- if _, err = e.Insert(comment); err != nil {
- return nil, err
- }
+ comment := &Comment{
+ Type: opts.Type,
+ PosterID: opts.Doer.ID,
+ Poster: opts.Doer,
+ IssueID: opts.Issue.ID,
+ LabelID: LabelID,
+ OldMilestoneID: opts.OldMilestoneID,
+ MilestoneID: opts.MilestoneID,
+ OldProjectID: opts.OldProjectID,
+ ProjectID: opts.ProjectID,
+ TimeID: opts.TimeID,
+ RemovedAssignee: opts.RemovedAssignee,
+ AssigneeID: opts.AssigneeID,
+ AssigneeTeamID: opts.AssigneeTeamID,
+ CommitID: opts.CommitID,
+ CommitSHA: opts.CommitSHA,
+ Line: opts.LineNum,
+ Content: opts.Content,
+ OldTitle: opts.OldTitle,
+ NewTitle: opts.NewTitle,
+ OldRef: opts.OldRef,
+ NewRef: opts.NewRef,
+ DependentIssueID: opts.DependentIssueID,
+ TreePath: opts.TreePath,
+ ReviewID: opts.ReviewID,
+ Patch: opts.Patch,
+ RefRepoID: opts.RefRepoID,
+ RefIssueID: opts.RefIssueID,
+ RefCommentID: opts.RefCommentID,
+ RefAction: opts.RefAction,
+ RefIsPull: opts.RefIsPull,
+ IsForcePush: opts.IsForcePush,
+ Invalidated: opts.Invalidated,
+ CommentMetaData: commentMetaData,
+ }
+ if err = db.Insert(ctx, comment); err != nil {
+ return nil, err
+ }
- if err = opts.Repo.LoadOwner(ctx); err != nil {
- return nil, err
- }
+ if err = opts.Repo.LoadOwner(ctx); err != nil {
+ return nil, err
+ }
- if err = updateCommentInfos(ctx, opts, comment); err != nil {
- return nil, err
- }
+ if err = updateCommentInfos(ctx, opts, comment); err != nil {
+ return nil, err
+ }
- if err = comment.AddCrossReferences(ctx, opts.Doer, false); err != nil {
- return nil, err
- }
- if err = committer.Commit(); err != nil {
- return nil, err
- }
- return comment, nil
+ if err = comment.AddCrossReferences(ctx, opts.Doer, false); err != nil {
+ return nil, err
+ }
+ return comment, nil
+ })
}
func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment *Comment) (err error) {
@@ -860,7 +844,7 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment
}
if comment.ReviewID != 0 {
if comment.Review == nil {
- if err := comment.loadReview(ctx); err != nil {
+ if err := comment.LoadReview(ctx); err != nil {
return err
}
}
@@ -1100,33 +1084,21 @@ func UpdateCommentInvalidate(ctx context.Context, c *Comment) error {
// UpdateComment updates information of comment.
func UpdateComment(ctx context.Context, c *Comment, contentVersion int, doer *user_model.User) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
-
- c.ContentVersion = contentVersion + 1
-
- affected, err := sess.ID(c.ID).AllCols().Where("content_version = ?", contentVersion).Update(c)
- if err != nil {
- return err
- }
- if affected == 0 {
- return ErrCommentAlreadyChanged
- }
- if err := c.LoadIssue(ctx); err != nil {
- return err
- }
- if err := c.AddCrossReferences(ctx, doer, true); err != nil {
- return err
- }
- if err := committer.Commit(); err != nil {
- return fmt.Errorf("Commit: %w", err)
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ c.ContentVersion = contentVersion + 1
- return nil
+ affected, err := db.GetEngine(ctx).ID(c.ID).AllCols().Where("content_version = ?", contentVersion).Update(c)
+ if err != nil {
+ return err
+ }
+ if affected == 0 {
+ return ErrCommentAlreadyChanged
+ }
+ if err := c.LoadIssue(ctx); err != nil {
+ return err
+ }
+ return c.AddCrossReferences(ctx, doer, true)
+ })
}
// DeleteComment deletes the comment
@@ -1285,31 +1257,28 @@ func InsertIssueComments(ctx context.Context, comments []*Comment) error {
return comment.IssueID, true
})
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- for _, comment := range comments {
- if _, err := db.GetEngine(ctx).NoAutoTime().Insert(comment); err != nil {
- return err
- }
-
- for _, reaction := range comment.Reactions {
- reaction.IssueID = comment.IssueID
- reaction.CommentID = comment.ID
- }
- if len(comment.Reactions) > 0 {
- if err := db.Insert(ctx, comment.Reactions); err != nil {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, comment := range comments {
+ if _, err := db.GetEngine(ctx).NoAutoTime().Insert(comment); err != nil {
return err
}
+
+ for _, reaction := range comment.Reactions {
+ reaction.IssueID = comment.IssueID
+ reaction.CommentID = comment.ID
+ }
+ if len(comment.Reactions) > 0 {
+ if err := db.Insert(ctx, comment.Reactions); err != nil {
+ return err
+ }
+ }
}
- }
- for _, issueID := range issueIDs {
- if err := UpdateIssueNumComments(ctx, issueID); err != nil {
- return err
+ for _, issueID := range issueIDs {
+ if err := UpdateIssueNumComments(ctx, issueID); err != nil {
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
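The recurring change in this file (and in the issue files below) replaces the manual db.TxContext / committer.Close / committer.Commit sequence with db.WithTx, or db.WithTx2 where a value must be returned alongside the error. A minimal sketch of that closure pattern, with beginTx as a hypothetical stand-in for the real transaction setup; only the shape is meant to match the helpers used above:

```go
package example

import "context"

// txCommitter models just the parts of the committer used by the old code paths:
// Close rolls the transaction back unless Commit has already been called.
type txCommitter interface {
	Commit() error
	Close() error
}

// beginTx is a hypothetical helper standing in for the real transaction setup.
func beginTx(ctx context.Context) (context.Context, txCommitter, error) { panic("sketch only") }

// withTx shows the shape of the db.WithTx helper: the callback runs inside a
// transaction, a nil return commits, an error return rolls back via Close.
func withTx(parent context.Context, f func(ctx context.Context) error) error {
	ctx, committer, err := beginTx(parent)
	if err != nil {
		return err
	}
	defer committer.Close()

	if err := f(ctx); err != nil {
		return err // deferred Close rolls the transaction back
	}
	return committer.Commit()
}

// withTx2 is the two-value variant (db.WithTx2) used by CreateComment above:
// it additionally returns the callback's result on success.
func withTx2[T any](parent context.Context, f func(ctx context.Context) (T, error)) (T, error) {
	var zero T
	ctx, committer, err := beginTx(parent)
	if err != nil {
		return zero, err
	}
	defer committer.Close()

	result, err := f(ctx)
	if err != nil {
		return zero, err
	}
	return result, committer.Commit()
}
```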
diff --git a/models/issues/comment_code.go b/models/issues/comment_code.go
index b562aab500..55e67a1243 100644
--- a/models/issues/comment_code.go
+++ b/models/issues/comment_code.go
@@ -5,6 +5,7 @@ package issues
import (
"context"
+ "strconv"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/renderhelper"
@@ -114,7 +115,9 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
}
var err error
- rctx := renderhelper.NewRenderContextRepoComment(ctx, issue.Repo)
+ rctx := renderhelper.NewRenderContextRepoComment(ctx, issue.Repo, renderhelper.RepoCommentOptions{
+ FootnoteContextID: strconv.FormatInt(comment.ID, 10),
+ })
if comment.RenderedContent, err = markdown.RenderString(rctx, comment.Content); err != nil {
return nil, err
}
diff --git a/models/issues/comment_list.go b/models/issues/comment_list.go
index c483ada75a..f6c485449f 100644
--- a/models/issues/comment_list.go
+++ b/models/issues/comment_list.go
@@ -57,10 +57,7 @@ func (comments CommentList) loadLabels(ctx context.Context) error {
commentLabels := make(map[int64]*Label, len(labelIDs))
left := len(labelIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", labelIDs[:limit]).
Rows(new(Label))
@@ -107,10 +104,7 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
left := len(milestoneIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
err := db.GetEngine(ctx).
In("id", milestoneIDs[:limit]).
Find(&milestoneMaps)
@@ -146,10 +140,7 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
left := len(milestoneIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
err := db.GetEngine(ctx).
In("id", milestoneIDs[:limit]).
Find(&milestoneMaps)
@@ -184,10 +175,7 @@ func (comments CommentList) loadAssignees(ctx context.Context) error {
assignees := make(map[int64]*user_model.User, len(assigneeIDs))
left := len(assigneeIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", assigneeIDs[:limit]).
Rows(new(user_model.User))
@@ -256,10 +244,7 @@ func (comments CommentList) LoadIssues(ctx context.Context) error {
issues := make(map[int64]*Issue, len(issueIDs))
left := len(issueIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("id", issueIDs[:limit]).
Rows(new(Issue))
@@ -313,10 +298,7 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error {
issues := make(map[int64]*Issue, len(issueIDs))
left := len(issueIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := e.
In("id", issueIDs[:limit]).
Rows(new(Issue))
@@ -392,10 +374,7 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
commentsIDs := comments.getAttachmentCommentIDs()
left := len(commentsIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("comment_id", commentsIDs[:limit]).
Rows(new(repo_model.Attachment))
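Every loadXxx helper in this file now clamps its batch size with the Go 1.21 built-in min instead of the old three-line comparison. A small sketch of that chunked-query pattern, with batchSize standing in for db.DefaultMaxInSize and the query itself abstracted behind a callback:

```go
package example

// batchSize stands in for db.DefaultMaxInSize, the cap the loops above place
// on the number of IDs per "IN (...)" query.
const batchSize = 1000

// forEachChunk walks the ID list in batches, mirroring the loops above:
// the built-in min (Go 1.21+) replaces the old manual clamp.
func forEachChunk(ids []int64, handle func(chunk []int64) error) error {
	left := len(ids)
	for left > 0 {
		limit := min(left, batchSize)
		if err := handle(ids[:limit]); err != nil {
			return err
		}
		ids = ids[limit:]
		left -= limit
	}
	return nil
}
```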
diff --git a/models/issues/comment_test.go b/models/issues/comment_test.go
index ae0bc3ce17..c08e3b970d 100644
--- a/models/issues/comment_test.go
+++ b/models/issues/comment_test.go
@@ -34,10 +34,10 @@ func TestCreateComment(t *testing.T) {
assert.NoError(t, err)
then := time.Now().Unix()
- assert.EqualValues(t, issues_model.CommentTypeComment, comment.Type)
- assert.EqualValues(t, "Hello", comment.Content)
- assert.EqualValues(t, issue.ID, comment.IssueID)
- assert.EqualValues(t, doer.ID, comment.PosterID)
+ assert.Equal(t, issues_model.CommentTypeComment, comment.Type)
+ assert.Equal(t, "Hello", comment.Content)
+ assert.Equal(t, issue.ID, comment.IssueID)
+ assert.Equal(t, doer.ID, comment.PosterID)
unittest.AssertInt64InRange(t, now, then, int64(comment.CreatedUnix))
unittest.AssertExistsAndLoadBean(t, comment) // assert actually added to DB
@@ -58,9 +58,9 @@ func Test_UpdateCommentAttachment(t *testing.T) {
assert.NoError(t, err)
attachment2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: attachment.ID})
- assert.EqualValues(t, attachment.Name, attachment2.Name)
- assert.EqualValues(t, comment.ID, attachment2.CommentID)
- assert.EqualValues(t, comment.IssueID, attachment2.IssueID)
+ assert.Equal(t, attachment.Name, attachment2.Name)
+ assert.Equal(t, comment.ID, attachment2.CommentID)
+ assert.Equal(t, comment.IssueID, attachment2.IssueID)
}
func TestFetchCodeComments(t *testing.T) {
@@ -111,7 +111,7 @@ func TestMigrate_InsertIssueComments(t *testing.T) {
assert.NoError(t, err)
issueModified := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
- assert.EqualValues(t, issue.NumComments+1, issueModified.NumComments)
+ assert.Equal(t, issue.NumComments+1, issueModified.NumComments)
unittest.CheckConsistencyFor(t, &issues_model.Issue{})
}
@@ -122,5 +122,5 @@ func Test_UpdateIssueNumComments(t *testing.T) {
assert.NoError(t, issues_model.UpdateIssueNumComments(db.DefaultContext, issue2.ID))
issue2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
- assert.EqualValues(t, 1, issue2.NumComments)
+ assert.Equal(t, 1, issue2.NumComments)
}
diff --git a/models/issues/dependency.go b/models/issues/dependency.go
index 146dd1887d..0eaa47e359 100644
--- a/models/issues/dependency.go
+++ b/models/issues/dependency.go
@@ -128,79 +128,64 @@ const (
// CreateIssueDependency creates a new dependency for an issue
func CreateIssueDependency(ctx context.Context, user *user_model.User, issue, dep *Issue) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- // Check if it already exists
- exists, err := issueDepExists(ctx, issue.ID, dep.ID)
- if err != nil {
- return err
- }
- if exists {
- return ErrDependencyExists{issue.ID, dep.ID}
- }
- // And if it would be circular
- circular, err := issueDepExists(ctx, dep.ID, issue.ID)
- if err != nil {
- return err
- }
- if circular {
- return ErrCircularDependency{issue.ID, dep.ID}
- }
-
- if err := db.Insert(ctx, &IssueDependency{
- UserID: user.ID,
- IssueID: issue.ID,
- DependencyID: dep.ID,
- }); err != nil {
- return err
- }
-
- // Add comment referencing the new dependency
- if err = createIssueDependencyComment(ctx, user, issue, dep, true); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // Check if it already exists
+ exists, err := issueDepExists(ctx, issue.ID, dep.ID)
+ if err != nil {
+ return err
+ }
+ if exists {
+ return ErrDependencyExists{issue.ID, dep.ID}
+ }
+ // And if it would be circular
+ circular, err := issueDepExists(ctx, dep.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+ if circular {
+ return ErrCircularDependency{issue.ID, dep.ID}
+ }
+
+ if err := db.Insert(ctx, &IssueDependency{
+ UserID: user.ID,
+ IssueID: issue.ID,
+ DependencyID: dep.ID,
+ }); err != nil {
+ return err
+ }
+
+ // Add comment referencing the new dependency
+ return createIssueDependencyComment(ctx, user, issue, dep, true)
+ })
}
// RemoveIssueDependency removes a dependency from an issue
func RemoveIssueDependency(ctx context.Context, user *user_model.User, issue, dep *Issue, depType DependencyType) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- var issueDepToDelete IssueDependency
-
- switch depType {
- case DependencyTypeBlockedBy:
- issueDepToDelete = IssueDependency{IssueID: issue.ID, DependencyID: dep.ID}
- case DependencyTypeBlocking:
- issueDepToDelete = IssueDependency{IssueID: dep.ID, DependencyID: issue.ID}
- default:
- return ErrUnknownDependencyType{depType}
- }
-
- affected, err := db.GetEngine(ctx).Delete(&issueDepToDelete)
- if err != nil {
- return err
- }
-
- // If we deleted nothing, the dependency did not exist
- if affected <= 0 {
- return ErrDependencyNotExists{issue.ID, dep.ID}
- }
-
- // Add comment referencing the removed dependency
- if err = createIssueDependencyComment(ctx, user, issue, dep, false); err != nil {
- return err
- }
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ var issueDepToDelete IssueDependency
+
+ switch depType {
+ case DependencyTypeBlockedBy:
+ issueDepToDelete = IssueDependency{IssueID: issue.ID, DependencyID: dep.ID}
+ case DependencyTypeBlocking:
+ issueDepToDelete = IssueDependency{IssueID: dep.ID, DependencyID: issue.ID}
+ default:
+ return ErrUnknownDependencyType{depType}
+ }
+
+ affected, err := db.GetEngine(ctx).Delete(&issueDepToDelete)
+ if err != nil {
+ return err
+ }
+
+ // If we deleted nothing, the dependency did not exist
+ if affected <= 0 {
+ return ErrDependencyNotExists{issue.ID, dep.ID}
+ }
+
+ // Add comment referencing the removed dependency
+ return createIssueDependencyComment(ctx, user, issue, dep, false)
+ })
}
// Check if the dependency already exists
diff --git a/models/issues/issue.go b/models/issues/issue.go
index 5204f27faf..ef651359ab 100644
--- a/models/issues/issue.go
+++ b/models/issues/issue.go
@@ -10,6 +10,7 @@ import (
"html/template"
"regexp"
"slices"
+ "strconv"
"code.gitea.io/gitea/models/db"
project_model "code.gitea.io/gitea/models/project"
@@ -754,18 +755,14 @@ func (issue *Issue) HasOriginalAuthor() bool {
// InsertIssues insert issues to database
func InsertIssues(ctx context.Context, issues ...*Issue) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- for _, issue := range issues {
- if err := insertIssue(ctx, issue); err != nil {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, issue := range issues {
+ if err := insertIssue(ctx, issue); err != nil {
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
func insertIssue(ctx context.Context, issue *Issue) error {
@@ -815,7 +812,7 @@ func ChangeIssueTimeEstimate(ctx context.Context, issue *Issue, doer *user_model
Doer: doer,
Repo: issue.Repo,
Issue: issue,
- Content: fmt.Sprintf("%d", timeEstimate),
+ Content: strconv.FormatInt(timeEstimate, 10),
}
if _, err := CreateComment(ctx, opts); err != nil {
return fmt.Errorf("createComment: %w", err)
diff --git a/models/issues/issue_index.go b/models/issues/issue_index.go
index 2eb61858bf..1fe4a08a09 100644
--- a/models/issues/issue_index.go
+++ b/models/issues/issue_index.go
@@ -12,20 +12,12 @@ import (
// RecalculateIssueIndexForRepo create issue_index for repo if not exist and
// update it based on highest index of existing issues assigned to a repo
func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ var maxIndex int64
+ if _, err := db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&maxIndex); err != nil {
+ return err
+ }
- var maxIndex int64
- if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&maxIndex); err != nil {
- return err
- }
-
- if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, maxIndex); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.SyncMaxResourceIndex(ctx, "issue_index", repoID, maxIndex)
+ })
}
diff --git a/models/issues/issue_label.go b/models/issues/issue_label.go
index 10fc821454..151469a9b8 100644
--- a/models/issues/issue_label.go
+++ b/models/issues/issue_label.go
@@ -88,36 +88,28 @@ func NewIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_m
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err = issue.LoadRepo(ctx); err != nil {
- return err
- }
-
- // Do NOT add invalid labels
- if issue.RepoID != label.RepoID && issue.Repo.OwnerID != label.OrgID {
- return nil
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
- if err = RemoveDuplicateExclusiveIssueLabels(ctx, issue, label, doer); err != nil {
- return nil
- }
+ // Do NOT add invalid labels
+ if issue.RepoID != label.RepoID && issue.Repo.OwnerID != label.OrgID {
+ return nil
+ }
- if err = newIssueLabel(ctx, issue, label, doer); err != nil {
- return err
- }
+ if err = RemoveDuplicateExclusiveIssueLabels(ctx, issue, label, doer); err != nil {
+ return nil
+ }
- issue.isLabelsLoaded = false
- issue.Labels = nil
- if err = issue.LoadLabels(ctx); err != nil {
- return err
- }
+ if err = newIssueLabel(ctx, issue, label, doer); err != nil {
+ return err
+ }
- return committer.Commit()
+ issue.isLabelsLoaded = false
+ issue.Labels = nil
+ return issue.LoadLabels(ctx)
+ })
}
// newIssueLabels add labels to an issue. It will check if the labels are valid for the issue
@@ -151,24 +143,16 @@ func newIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *us
// NewIssueLabels creates a list of issue-label relations.
func NewIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err = newIssueLabels(ctx, issue, labels, doer); err != nil {
- return err
- }
-
- // reload all labels
- issue.isLabelsLoaded = false
- issue.Labels = nil
- if err = issue.LoadLabels(ctx); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = newIssueLabels(ctx, issue, labels, doer); err != nil {
+ return err
+ }
- return committer.Commit()
+ // reload all labels
+ issue.isLabelsLoaded = false
+ issue.Labels = nil
+ return issue.LoadLabels(ctx)
+ })
}
func deleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) (err error) {
@@ -206,6 +190,7 @@ func DeleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *use
}
issue.Labels = nil
+ issue.isLabelsLoaded = false
return issue.LoadLabels(ctx)
}
@@ -364,35 +349,23 @@ func clearIssueLabels(ctx context.Context, issue *Issue, doer *user_model.User)
// ClearIssueLabels removes all issue labels as the given user.
// Triggers appropriate WebHooks, if any.
func ClearIssueLabels(ctx context.Context, issue *Issue, doer *user_model.User) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err := issue.LoadRepo(ctx); err != nil {
- return err
- } else if err = issue.LoadPullRequest(ctx); err != nil {
- return err
- }
-
- perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
- if err != nil {
- return err
- }
- if !perm.CanWriteIssuesOrPulls(issue.IsPull) {
- return ErrRepoLabelNotExist{}
- }
-
- if err = clearIssueLabels(ctx, issue, doer); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ } else if err = issue.LoadPullRequest(ctx); err != nil {
+ return err
+ }
- if err = committer.Commit(); err != nil {
- return fmt.Errorf("Commit: %w", err)
- }
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanWriteIssuesOrPulls(issue.IsPull) {
+ return ErrRepoLabelNotExist{}
+ }
- return nil
+ return clearIssueLabels(ctx, issue, doer)
+ })
}
type labelSorter []*Label
@@ -437,69 +410,61 @@ func RemoveDuplicateExclusiveLabels(labels []*Label) []*Label {
// ReplaceIssueLabels removes all current labels and add new labels to the issue.
// Triggers appropriate WebHooks, if any.
func ReplaceIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
- if err = issue.LoadRepo(ctx); err != nil {
- return err
- }
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
- if err = issue.LoadLabels(ctx); err != nil {
- return err
- }
+ labels = RemoveDuplicateExclusiveLabels(labels)
- labels = RemoveDuplicateExclusiveLabels(labels)
+ sort.Sort(labelSorter(labels))
+ sort.Sort(labelSorter(issue.Labels))
- sort.Sort(labelSorter(labels))
- sort.Sort(labelSorter(issue.Labels))
+ var toAdd, toRemove []*Label
- var toAdd, toRemove []*Label
+ addIndex, removeIndex := 0, 0
+ for addIndex < len(labels) && removeIndex < len(issue.Labels) {
+ addLabel := labels[addIndex]
+ removeLabel := issue.Labels[removeIndex]
+ if addLabel.ID == removeLabel.ID {
+ // Silently drop invalid labels
+ if removeLabel.RepoID != issue.RepoID && removeLabel.OrgID != issue.Repo.OwnerID {
+ toRemove = append(toRemove, removeLabel)
+ }
- addIndex, removeIndex := 0, 0
- for addIndex < len(labels) && removeIndex < len(issue.Labels) {
- addLabel := labels[addIndex]
- removeLabel := issue.Labels[removeIndex]
- if addLabel.ID == removeLabel.ID {
- // Silently drop invalid labels
- if removeLabel.RepoID != issue.RepoID && removeLabel.OrgID != issue.Repo.OwnerID {
+ addIndex++
+ removeIndex++
+ } else if addLabel.ID < removeLabel.ID {
+ // Only add if the label is valid
+ if addLabel.RepoID == issue.RepoID || addLabel.OrgID == issue.Repo.OwnerID {
+ toAdd = append(toAdd, addLabel)
+ }
+ addIndex++
+ } else {
toRemove = append(toRemove, removeLabel)
+ removeIndex++
}
-
- addIndex++
- removeIndex++
- } else if addLabel.ID < removeLabel.ID {
- // Only add if the label is valid
- if addLabel.RepoID == issue.RepoID || addLabel.OrgID == issue.Repo.OwnerID {
- toAdd = append(toAdd, addLabel)
- }
- addIndex++
- } else {
- toRemove = append(toRemove, removeLabel)
- removeIndex++
}
- }
- toAdd = append(toAdd, labels[addIndex:]...)
- toRemove = append(toRemove, issue.Labels[removeIndex:]...)
+ toAdd = append(toAdd, labels[addIndex:]...)
+ toRemove = append(toRemove, issue.Labels[removeIndex:]...)
- if len(toAdd) > 0 {
- if err = newIssueLabels(ctx, issue, toAdd, doer); err != nil {
- return fmt.Errorf("addLabels: %w", err)
+ if len(toAdd) > 0 {
+ if err = newIssueLabels(ctx, issue, toAdd, doer); err != nil {
+ return fmt.Errorf("addLabels: %w", err)
+ }
}
- }
- for _, l := range toRemove {
- if err = deleteIssueLabel(ctx, issue, l, doer); err != nil {
- return fmt.Errorf("removeLabel: %w", err)
+ for _, l := range toRemove {
+ if err = deleteIssueLabel(ctx, issue, l, doer); err != nil {
+ return fmt.Errorf("removeLabel: %w", err)
+ }
}
- }
-
- issue.Labels = nil
- if err = issue.LoadLabels(ctx); err != nil {
- return err
- }
- return committer.Commit()
+ issue.Labels = nil
+ return issue.LoadLabels(ctx)
+ })
}
diff --git a/models/issues/issue_list.go b/models/issues/issue_list.go
index 6c74b533b3..26b93189b8 100644
--- a/models/issues/issue_list.go
+++ b/models/issues/issue_list.go
@@ -42,10 +42,7 @@ func (issues IssueList) LoadRepositories(ctx context.Context) (repo_model.Reposi
repoMaps := make(map[int64]*repo_model.Repository, len(repoIDs))
left := len(repoIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
err := db.GetEngine(ctx).
In("id", repoIDs[:limit]).
Find(&repoMaps)
@@ -116,10 +113,7 @@ func (issues IssueList) LoadLabels(ctx context.Context) error {
issueIDs := issues.getIssueIDs()
left := len(issueIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).Table("label").
Join("LEFT", "issue_label", "issue_label.label_id = label.id").
In("issue_label.issue_id", issueIDs[:limit]).
@@ -171,10 +165,7 @@ func (issues IssueList) LoadMilestones(ctx context.Context) error {
milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
left := len(milestoneIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
err := db.GetEngine(ctx).
In("id", milestoneIDs[:limit]).
Find(&milestoneMaps)
@@ -203,10 +194,7 @@ func (issues IssueList) LoadProjects(ctx context.Context) error {
}
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
projects := make([]*projectWithIssueID, 0, limit)
err := db.GetEngine(ctx).
@@ -245,10 +233,7 @@ func (issues IssueList) LoadAssignees(ctx context.Context) error {
issueIDs := issues.getIssueIDs()
left := len(issueIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).Table("issue_assignees").
Join("INNER", "`user`", "`user`.id = `issue_assignees`.assignee_id").
In("`issue_assignees`.issue_id", issueIDs[:limit]).OrderBy(user_model.GetOrderByName()).
@@ -306,10 +291,7 @@ func (issues IssueList) LoadPullRequests(ctx context.Context) error {
pullRequestMaps := make(map[int64]*PullRequest, len(issuesIDs))
left := len(issuesIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("issue_id", issuesIDs[:limit]).
Rows(new(PullRequest))
@@ -354,10 +336,7 @@ func (issues IssueList) LoadAttachments(ctx context.Context) (err error) {
issuesIDs := issues.getIssueIDs()
left := len(issuesIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).
In("issue_id", issuesIDs[:limit]).
Rows(new(repo_model.Attachment))
@@ -399,10 +378,7 @@ func (issues IssueList) loadComments(ctx context.Context, cond builder.Cond) (er
issuesIDs := issues.getIssueIDs()
left := len(issuesIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
rows, err := db.GetEngine(ctx).Table("comment").
Join("INNER", "issue", "issue.id = comment.issue_id").
In("issue.id", issuesIDs[:limit]).
@@ -466,10 +442,7 @@ func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) {
left := len(ids)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
// select issue_id, sum(time) from tracked_time where issue_id in (<issue ids in current page>) group by issue_id
rows, err := db.GetEngine(ctx).Table("tracked_time").
diff --git a/models/issues/issue_list_test.go b/models/issues/issue_list_test.go
index 9069e1012d..5b4d2ca5ab 100644
--- a/models/issues/issue_list_test.go
+++ b/models/issues/issue_list_test.go
@@ -27,7 +27,7 @@ func TestIssueList_LoadRepositories(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, repos, 2)
for _, issue := range issueList {
- assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ assert.Equal(t, issue.RepoID, issue.Repo.ID)
}
}
@@ -41,28 +41,28 @@ func TestIssueList_LoadAttributes(t *testing.T) {
assert.NoError(t, issueList.LoadAttributes(db.DefaultContext))
for _, issue := range issueList {
- assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ assert.Equal(t, issue.RepoID, issue.Repo.ID)
for _, label := range issue.Labels {
- assert.EqualValues(t, issue.RepoID, label.RepoID)
+ assert.Equal(t, issue.RepoID, label.RepoID)
unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID})
}
if issue.PosterID > 0 {
- assert.EqualValues(t, issue.PosterID, issue.Poster.ID)
+ assert.Equal(t, issue.PosterID, issue.Poster.ID)
}
if issue.AssigneeID > 0 {
- assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID)
+ assert.Equal(t, issue.AssigneeID, issue.Assignee.ID)
}
if issue.MilestoneID > 0 {
- assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID)
+ assert.Equal(t, issue.MilestoneID, issue.Milestone.ID)
}
if issue.IsPull {
- assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID)
+ assert.Equal(t, issue.ID, issue.PullRequest.IssueID)
}
for _, attachment := range issue.Attachments {
- assert.EqualValues(t, issue.ID, attachment.IssueID)
+ assert.Equal(t, issue.ID, attachment.IssueID)
}
for _, comment := range issue.Comments {
- assert.EqualValues(t, issue.ID, comment.IssueID)
+ assert.Equal(t, issue.ID, comment.IssueID)
}
if issue.ID == int64(1) {
assert.Equal(t, int64(400), issue.TotalTrackedTime)
diff --git a/models/issues/issue_lock.go b/models/issues/issue_lock.go
index b21629b529..2e5bf64cc6 100644
--- a/models/issues/issue_lock.go
+++ b/models/issues/issue_lock.go
@@ -12,8 +12,14 @@ import (
// IssueLockOptions defines options for locking and/or unlocking an issue/PR
type IssueLockOptions struct {
- Doer *user_model.User
- Issue *Issue
+ Doer *user_model.User
+ Issue *Issue
+
+ // Reason is the doer-provided comment message for the locked issue
+	// GitHub doesn't support changing the "reasons" via a config file, so it has pre-defined "reason" enum values.
+	// Gitea is different: it allows the site admin to define customized "reasons" in the config file.
+	// So the API caller might not know which "reasons" are valid, and the customized reasons are not translatable.
+	// To keep things clear and simple, the doer may use any reason they like; we do not validate it.
Reason string
}
@@ -41,26 +47,19 @@ func updateIssueLock(ctx context.Context, opts *IssueLockOptions, lock bool) err
commentType = CommentTypeUnlock
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err := UpdateIssueCols(ctx, opts.Issue, "is_locked"); err != nil {
+ return err
+ }
- if err := UpdateIssueCols(ctx, opts.Issue, "is_locked"); err != nil {
+ opt := &CreateCommentOptions{
+ Doer: opts.Doer,
+ Issue: opts.Issue,
+ Repo: opts.Issue.Repo,
+ Type: commentType,
+ Content: opts.Reason,
+ }
+ _, err := CreateComment(ctx, opt)
return err
- }
-
- opt := &CreateCommentOptions{
- Doer: opts.Doer,
- Issue: opts.Issue,
- Repo: opts.Issue.Repo,
- Type: commentType,
- Content: opts.Reason,
- }
- if _, err := CreateComment(ctx, opt); err != nil {
- return err
- }
-
- return committer.Commit()
+ })
}
diff --git a/models/issues/issue_search.go b/models/issues/issue_search.go
index 694b918755..79bd6a19b0 100644
--- a/models/issues/issue_search.go
+++ b/models/issues/issue_search.go
@@ -21,14 +21,16 @@ import (
"xorm.io/xorm"
)
+const ScopeSortPrefix = "scope-"
+
// IssuesOptions represents options of an issue.
-type IssuesOptions struct { //nolint
+type IssuesOptions struct { //nolint:revive // export stutter
Paginator *db.ListOptions
RepoIDs []int64 // overwrites RepoCond if the length is not 0
AllPublic bool // include also all public repositories
RepoCond builder.Cond
- AssigneeID optional.Option[int64]
- PosterID optional.Option[int64]
+ AssigneeID string // "(none)" or "(any)" or a user ID
+ PosterID string // "(none)" or "(any)" or a user ID
MentionedID int64
ReviewRequestedID int64
ReviewedID int64
@@ -70,11 +72,24 @@ func (o *IssuesOptions) Copy(edit ...func(options *IssuesOptions)) *IssuesOption
// applySorts sort an issues-related session based on the provided
// sortType string
func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
+ // Since this sortType is dynamically created, it has to be treated specially.
+ if after, ok := strings.CutPrefix(sortType, ScopeSortPrefix); ok {
+ scope := after
+ sess.Join("LEFT", "issue_label", "issue.id = issue_label.issue_id")
+		// "exclusive_order=0" means "no order is set", so it is excluded from the JOIN criteria; the "LEFT JOIN" result is then NULL
+ sess.Join("LEFT", "label", "label.id = issue_label.label_id AND label.exclusive_order <> 0 AND label.name LIKE ?", scope+"/%")
+ // Use COALESCE to make sure we sort NULL last regardless of backend DB (2147483647 == max int)
+ sess.OrderBy("COALESCE(label.exclusive_order, 2147483647) ASC").Desc("issue.id")
+ return
+ }
+
switch sortType {
case "oldest":
sess.Asc("issue.created_unix").Asc("issue.id")
case "recentupdate":
sess.Desc("issue.updated_unix").Desc("issue.created_unix").Desc("issue.id")
+ case "recentclose":
+ sess.Desc("issue.closed_unix").Desc("issue.created_unix").Desc("issue.id")
case "leastupdate":
sess.Asc("issue.updated_unix").Asc("issue.created_unix").Asc("issue.id")
case "mostcomment":
@@ -356,26 +371,25 @@ func issuePullAccessibleRepoCond(repoIDstr string, userID int64, owner *user_mod
return cond
}
-func applyAssigneeCondition(sess *xorm.Session, assigneeID optional.Option[int64]) {
+func applyAssigneeCondition(sess *xorm.Session, assigneeID string) {
// old logic: 0 is also treated as "not filtering assignee", because the "assignee" was read as FormInt64
- if !assigneeID.Has() || assigneeID.Value() == 0 {
- return
- }
- if assigneeID.Value() == db.NoConditionID {
+ if assigneeID == "(none)" {
sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
- } else {
+ } else if assigneeID == "(any)" {
+ sess.Where("issue.id IN (SELECT issue_id FROM issue_assignees)")
+ } else if assigneeIDInt64, _ := strconv.ParseInt(assigneeID, 10, 64); assigneeIDInt64 > 0 {
sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
- And("issue_assignees.assignee_id = ?", assigneeID.Value())
+ And("issue_assignees.assignee_id = ?", assigneeIDInt64)
}
}
-func applyPosterCondition(sess *xorm.Session, posterID optional.Option[int64]) {
- if !posterID.Has() {
- return
- }
- // poster doesn't need to support db.NoConditionID(-1), so just use the value as-is
- if posterID.Has() {
- sess.And("issue.poster_id=?", posterID.Value())
+func applyPosterCondition(sess *xorm.Session, posterID string) {
+	// Actually, every issue has a poster.
+	// The "(none)" value is for internal use only: when the doer searches for a non-existing user as the poster, "(none)" is used to return an empty result.
+ if posterID == "(none)" {
+ sess.And("issue.poster_id=0")
+ } else if posterIDInt64, _ := strconv.ParseInt(posterID, 10, 64); posterIDInt64 > 0 {
+ sess.And("issue.poster_id=?", posterIDInt64)
}
}
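Two behavioural changes in this file are worth calling out: sort keys prefixed with "scope-" are now handled dynamically via strings.CutPrefix, and AssigneeID/PosterID become strings that accept the sentinels "(none)" and "(any)" as well as a plain user ID. A small sketch of how such values can be parsed; the helper names here are illustrative, not part of the patch:

```go
package example

import (
	"strconv"
	"strings"
)

const scopeSortPrefix = "scope-" // mirrors ScopeSortPrefix above

// parseScopeSort shows the dynamic sort handling added to applySorts: a sort
// key like "scope-priority" selects ordering by labels under "priority/...".
func parseScopeSort(sortType string) (scope string, ok bool) {
	return strings.CutPrefix(sortType, scopeSortPrefix)
}

// parseUserFilter shows how the string-typed AssigneeID/PosterID values are
// interpreted: the sentinels are kept as-is, anything else only matters if it
// parses to a positive user ID.
func parseUserFilter(value string) (sentinel string, userID int64) {
	switch value {
	case "(none)", "(any)":
		return value, 0
	default:
		id, _ := strconv.ParseInt(value, 10, 64)
		if id > 0 {
			return "", id
		}
		return "", 0 // unrecognized values filter nothing, like the old "0 means no filter" behaviour
	}
}
```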
diff --git a/models/issues/issue_stats.go b/models/issues/issue_stats.go
index 50409fbbd8..adedaa3d3a 100644
--- a/models/issues/issue_stats.go
+++ b/models/issues/issue_stats.go
@@ -94,10 +94,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error
// ids in a temporary table and join from them.
accum := &IssueStats{}
for i := 0; i < len(opts.IssueIDs); {
- chunk := i + MaxQueryParameters
- if chunk > len(opts.IssueIDs) {
- chunk = len(opts.IssueIDs)
- }
+ chunk := min(i+MaxQueryParameters, len(opts.IssueIDs))
stats, err := getIssueStatsChunk(ctx, opts, opts.IssueIDs[i:chunk])
if err != nil {
return nil, err
diff --git a/models/issues/issue_test.go b/models/issues/issue_test.go
index 3f76a81bb6..1c5db55bbc 100644
--- a/models/issues/issue_test.go
+++ b/models/issues/issue_test.go
@@ -5,6 +5,7 @@ package issues_test
import (
"fmt"
+ "slices"
"sort"
"sync"
"testing"
@@ -15,7 +16,6 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
@@ -142,8 +142,8 @@ func TestUpdateIssueCols(t *testing.T) {
then := time.Now().Unix()
updatedIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue.ID})
- assert.EqualValues(t, newTitle, updatedIssue.Title)
- assert.EqualValues(t, prevContent, updatedIssue.Content)
+ assert.Equal(t, newTitle, updatedIssue.Title)
+ assert.Equal(t, prevContent, updatedIssue.Content)
unittest.AssertInt64InRange(t, now, then, int64(updatedIssue.UpdatedUnix))
}
@@ -155,7 +155,7 @@ func TestIssues(t *testing.T) {
}{
{
issues_model.IssuesOptions{
- AssigneeID: optional.Some(int64(1)),
+ AssigneeID: "1",
SortType: "oldest",
},
[]int64{1, 6},
@@ -202,7 +202,7 @@ func TestIssues(t *testing.T) {
assert.NoError(t, err)
if assert.Len(t, issues, len(test.ExpectedIssueIDs)) {
for i, issue := range issues {
- assert.EqualValues(t, test.ExpectedIssueIDs[i], issue.ID)
+ assert.Equal(t, test.ExpectedIssueIDs[i], issue.ID)
}
}
}
@@ -235,10 +235,10 @@ func testInsertIssue(t *testing.T, title, content string, expectIndex int64) *is
has, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Get(&newIssue)
assert.NoError(t, err)
assert.True(t, has)
- assert.EqualValues(t, issue.Title, newIssue.Title)
- assert.EqualValues(t, issue.Content, newIssue.Content)
+ assert.Equal(t, issue.Title, newIssue.Title)
+ assert.Equal(t, issue.Content, newIssue.Content)
if expectIndex > 0 {
- assert.EqualValues(t, expectIndex, newIssue.Index)
+ assert.Equal(t, expectIndex, newIssue.Index)
}
})
return &newIssue
@@ -271,8 +271,8 @@ func TestIssue_ResolveMentions(t *testing.T) {
for i, user := range resolved {
ids[i] = user.ID
}
- sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })
- assert.EqualValues(t, expected, ids)
+ slices.Sort(ids)
+ assert.Equal(t, expected, ids)
}
// Public repo, existing user
@@ -293,7 +293,7 @@ func TestResourceIndex(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
var wg sync.WaitGroup
- for i := 0; i < 100; i++ {
+ for i := range 100 {
wg.Add(1)
go func(i int) {
testInsertIssue(t, fmt.Sprintf("issue %d", i+1), "my issue", 0)
@@ -315,7 +315,7 @@ func TestCorrectIssueStats(t *testing.T) {
issueAmount := issues_model.MaxQueryParameters + 10
var wg sync.WaitGroup
- for i := 0; i < issueAmount; i++ {
+ for i := range issueAmount {
wg.Add(1)
go func(i int) {
testInsertIssue(t, fmt.Sprintf("Issue %d", i+1), "Bugs are nasty", 0)
@@ -393,28 +393,28 @@ func TestIssueLoadAttributes(t *testing.T) {
for _, issue := range issueList {
assert.NoError(t, issue.LoadAttributes(db.DefaultContext))
- assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ assert.Equal(t, issue.RepoID, issue.Repo.ID)
for _, label := range issue.Labels {
- assert.EqualValues(t, issue.RepoID, label.RepoID)
+ assert.Equal(t, issue.RepoID, label.RepoID)
unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID})
}
if issue.PosterID > 0 {
- assert.EqualValues(t, issue.PosterID, issue.Poster.ID)
+ assert.Equal(t, issue.PosterID, issue.Poster.ID)
}
if issue.AssigneeID > 0 {
- assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID)
+ assert.Equal(t, issue.AssigneeID, issue.Assignee.ID)
}
if issue.MilestoneID > 0 {
- assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID)
+ assert.Equal(t, issue.MilestoneID, issue.Milestone.ID)
}
if issue.IsPull {
- assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID)
+ assert.Equal(t, issue.ID, issue.PullRequest.IssueID)
}
for _, attachment := range issue.Attachments {
- assert.EqualValues(t, issue.ID, attachment.IssueID)
+ assert.Equal(t, issue.ID, attachment.IssueID)
}
for _, comment := range issue.Comments {
- assert.EqualValues(t, issue.ID, comment.IssueID)
+ assert.Equal(t, issue.ID, comment.IssueID)
}
if issue.ID == int64(1) {
assert.Equal(t, int64(400), issue.TotalTrackedTime)
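The loop rewrites in these tests ("for i := 0; i < n; i++" → "for i := range n") use Go 1.22's range-over-int form, which yields i = 0..n-1. A minimal sketch of the same pattern as used by the concurrent test helpers above; the names are illustrative:

```go
package example

import (
	"fmt"
	"sync"
)

// spawnWorkers starts n goroutines using the Go 1.22 range-over-int loop,
// waiting for all of them before returning.
func spawnWorkers(n int) {
	var wg sync.WaitGroup
	for i := range n {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			fmt.Printf("worker %d\n", i+1)
		}(i)
	}
	wg.Wait()
}
```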
diff --git a/models/issues/issue_update.go b/models/issues/issue_update.go
index 7b3fe04aa5..1c16817491 100644
--- a/models/issues/issue_update.go
+++ b/models/issues/issue_update.go
@@ -5,16 +5,14 @@ package issues
import (
"context"
+ "errors"
"fmt"
"strings"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
- "code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
- project_model "code.gitea.io/gitea/models/project"
repo_model "code.gitea.io/gitea/models/repo"
- system_model "code.gitea.io/gitea/models/system"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
@@ -169,20 +167,9 @@ func CloseIssue(ctx context.Context, issue *Issue, doer *user_model.User) (*Comm
return nil, err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- comment, err := SetIssueAsClosed(ctx, issue, doer, false)
- if err != nil {
- return nil, err
- }
- if err := committer.Commit(); err != nil {
- return nil, err
- }
- return comment, nil
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ return SetIssueAsClosed(ctx, issue, doer, false)
+ })
}
// ReopenIssue changes issue status to open.
@@ -194,88 +181,64 @@ func ReopenIssue(ctx context.Context, issue *Issue, doer *user_model.User) (*Com
return nil, err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- comment, err := setIssueAsReopen(ctx, issue, doer)
- if err != nil {
- return nil, err
- }
- if err := committer.Commit(); err != nil {
- return nil, err
- }
- return comment, nil
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ return setIssueAsReopen(ctx, issue, doer)
+ })
}
// ChangeIssueTitle changes the title of this issue, as the given user.
func ChangeIssueTitle(ctx context.Context, issue *Issue, doer *user_model.User, oldTitle string) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- issue.Title = util.EllipsisDisplayString(issue.Title, 255)
- if err = UpdateIssueCols(ctx, issue, "name"); err != nil {
- return fmt.Errorf("updateIssueCols: %w", err)
- }
-
- if err = issue.LoadRepo(ctx); err != nil {
- return fmt.Errorf("loadRepo: %w", err)
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ issue.Title = util.EllipsisDisplayString(issue.Title, 255)
+ if err = UpdateIssueCols(ctx, issue, "name"); err != nil {
+ return fmt.Errorf("updateIssueCols: %w", err)
+ }
- opts := &CreateCommentOptions{
- Type: CommentTypeChangeTitle,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- OldTitle: oldTitle,
- NewTitle: issue.Title,
- }
- if _, err = CreateComment(ctx, opts); err != nil {
- return fmt.Errorf("createComment: %w", err)
- }
- if err = issue.AddCrossReferences(ctx, doer, true); err != nil {
- return err
- }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return fmt.Errorf("loadRepo: %w", err)
+ }
- return committer.Commit()
+ opts := &CreateCommentOptions{
+ Type: CommentTypeChangeTitle,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldTitle: oldTitle,
+ NewTitle: issue.Title,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return fmt.Errorf("createComment: %w", err)
+ }
+ return issue.AddCrossReferences(ctx, doer, true)
+ })
}
// ChangeIssueRef changes the branch of this issue, as the given user.
func ChangeIssueRef(ctx context.Context, issue *Issue, doer *user_model.User, oldRef string) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err = UpdateIssueCols(ctx, issue, "ref"); err != nil {
- return fmt.Errorf("updateIssueCols: %w", err)
- }
-
- if err = issue.LoadRepo(ctx); err != nil {
- return fmt.Errorf("loadRepo: %w", err)
- }
- oldRefFriendly := strings.TrimPrefix(oldRef, git.BranchPrefix)
- newRefFriendly := strings.TrimPrefix(issue.Ref, git.BranchPrefix)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = UpdateIssueCols(ctx, issue, "ref"); err != nil {
+ return fmt.Errorf("updateIssueCols: %w", err)
+ }
- opts := &CreateCommentOptions{
- Type: CommentTypeChangeIssueRef,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- OldRef: oldRefFriendly,
- NewRef: newRefFriendly,
- }
- if _, err = CreateComment(ctx, opts); err != nil {
- return fmt.Errorf("createComment: %w", err)
- }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return fmt.Errorf("loadRepo: %w", err)
+ }
+ oldRefFriendly := strings.TrimPrefix(oldRef, git.BranchPrefix)
+ newRefFriendly := strings.TrimPrefix(issue.Ref, git.BranchPrefix)
- return committer.Commit()
+ opts := &CreateCommentOptions{
+ Type: CommentTypeChangeIssueRef,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldRef: oldRefFriendly,
+ NewRef: newRefFriendly,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return fmt.Errorf("createComment: %w", err)
+ }
+ return nil
+ })
}
// AddDeletePRBranchComment adds delete branch comment for pull request issue
@@ -297,64 +260,56 @@ func AddDeletePRBranchComment(ctx context.Context, doer *user_model.User, repo *
// UpdateIssueAttachments update attachments by UUIDs for the issue
func UpdateIssueAttachments(ctx context.Context, issueID int64, uuids []string) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, uuids)
- if err != nil {
- return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
- }
- for i := 0; i < len(attachments); i++ {
- attachments[i].IssueID = issueID
- if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
- return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, uuids)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
}
- }
- return committer.Commit()
+ for i := range attachments {
+ attachments[i].IssueID = issueID
+ if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
+ return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
+ }
+ }
+ return nil
+ })
}
// ChangeIssueContent changes issue content, as the given user.
func ChangeIssueContent(ctx context.Context, issue *Issue, doer *user_model.User, content string, contentVersion int) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- hasContentHistory, err := HasIssueContentHistory(ctx, issue.ID, 0)
- if err != nil {
- return fmt.Errorf("HasIssueContentHistory: %w", err)
- }
- if !hasContentHistory {
- if err = SaveIssueContentHistory(ctx, issue.PosterID, issue.ID, 0,
- issue.CreatedUnix, issue.Content, true); err != nil {
- return fmt.Errorf("SaveIssueContentHistory: %w", err)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ hasContentHistory, err := HasIssueContentHistory(ctx, issue.ID, 0)
+ if err != nil {
+ return fmt.Errorf("HasIssueContentHistory: %w", err)
+ }
+ if !hasContentHistory {
+ if err = SaveIssueContentHistory(ctx, issue.PosterID, issue.ID, 0,
+ issue.CreatedUnix, issue.Content, true); err != nil {
+ return fmt.Errorf("SaveIssueContentHistory: %w", err)
+ }
}
- }
- issue.Content = content
- issue.ContentVersion = contentVersion + 1
+ issue.Content = content
+ issue.ContentVersion = contentVersion + 1
- affected, err := db.GetEngine(ctx).ID(issue.ID).Cols("content", "content_version").Where("content_version = ?", contentVersion).Update(issue)
- if err != nil {
- return err
- }
- if affected == 0 {
- return ErrIssueAlreadyChanged
- }
-
- if err = SaveIssueContentHistory(ctx, doer.ID, issue.ID, 0,
- timeutil.TimeStampNow(), issue.Content, false); err != nil {
- return fmt.Errorf("SaveIssueContentHistory: %w", err)
- }
+ affected, err := db.GetEngine(ctx).ID(issue.ID).Cols("content", "content_version").Where("content_version = ?", contentVersion).Update(issue)
+ if err != nil {
+ return err
+ }
+ if affected == 0 {
+ return ErrIssueAlreadyChanged
+ }
- if err = issue.AddCrossReferences(ctx, doer, true); err != nil {
- return fmt.Errorf("addCrossReferences: %w", err)
- }
+ if err = SaveIssueContentHistory(ctx, doer.ID, issue.ID, 0,
+ timeutil.TimeStampNow(), issue.Content, false); err != nil {
+ return fmt.Errorf("SaveIssueContentHistory: %w", err)
+ }
- return committer.Commit()
+ if err = issue.AddCrossReferences(ctx, doer, true); err != nil {
+ return fmt.Errorf("addCrossReferences: %w", err)
+ }
+ return nil
+ })
}
// NewIssueOptions represents the options of a new issue.
@@ -386,10 +341,10 @@ func NewIssueWithIndex(ctx context.Context, doer *user_model.User, opts NewIssue
}
if opts.Issue.Index <= 0 {
- return fmt.Errorf("no issue index provided")
+ return errors.New("no issue index provided")
}
if opts.Issue.ID > 0 {
- return fmt.Errorf("issue exist")
+ return errors.New("issue exist")
}
if _, err := e.Insert(opts.Issue); err != nil {
@@ -514,23 +469,19 @@ func UpdateIssueDeadline(ctx context.Context, issue *Issue, deadlineUnix timeuti
if issue.DeadlineUnix == deadlineUnix {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- // Update the deadline
- if err = UpdateIssueCols(ctx, &Issue{ID: issue.ID, DeadlineUnix: deadlineUnix}, "deadline_unix"); err != nil {
- return err
- }
-
- // Make the comment
- if _, err = createDeadlineComment(ctx, doer, issue, deadlineUnix); err != nil {
- return fmt.Errorf("createRemovedDueDateComment: %w", err)
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // Update the deadline
+ if err = UpdateIssueCols(ctx, &Issue{ID: issue.ID, DeadlineUnix: deadlineUnix}, "deadline_unix"); err != nil {
+ return err
+ }
- return committer.Commit()
+ // Make the comment
+ if _, err = createDeadlineComment(ctx, doer, issue, deadlineUnix); err != nil {
+ return fmt.Errorf("createRemovedDueDateComment: %w", err)
+ }
+ return nil
+ })
}
// FindAndUpdateIssueMentions finds users mentioned in the given content string, and saves them in the database.
@@ -611,7 +562,7 @@ func ResolveIssueMentionsByVisibility(ctx context.Context, issue *Issue, doer *u
unittype = unit.TypePullRequests
}
for _, team := range teams {
- if team.AccessMode >= perm.AccessModeAdmin {
+ if team.HasAdminAccess() {
checked = append(checked, team.ID)
resolved[issue.Repo.Owner.LowerName+"/"+team.LowerName] = true
continue
@@ -715,137 +666,13 @@ func UpdateReactionsMigrationsByType(ctx context.Context, gitServiceType api.Git
return err
}
-// DeleteIssuesByRepoID deletes issues by repositories id
-func DeleteIssuesByRepoID(ctx context.Context, repoID int64) (attachmentPaths []string, err error) {
- // MariaDB has a performance bug: https://jira.mariadb.org/browse/MDEV-16289
- // so here it uses "DELETE ... WHERE IN" with pre-queried IDs.
- sess := db.GetEngine(ctx)
-
- for {
- issueIDs := make([]int64, 0, db.DefaultMaxInSize)
-
- err := sess.Table(&Issue{}).Where("repo_id = ?", repoID).OrderBy("id").Limit(db.DefaultMaxInSize).Cols("id").Find(&issueIDs)
- if err != nil {
- return nil, err
- }
-
- if len(issueIDs) == 0 {
- break
- }
-
- // Delete content histories
- _, err = sess.In("issue_id", issueIDs).Delete(&ContentHistory{})
- if err != nil {
- return nil, err
- }
-
- // Delete comments and attachments
- _, err = sess.In("issue_id", issueIDs).Delete(&Comment{})
- if err != nil {
- return nil, err
- }
-
- // Dependencies for issues in this repository
- _, err = sess.In("issue_id", issueIDs).Delete(&IssueDependency{})
- if err != nil {
- return nil, err
- }
-
- // Delete dependencies for issues in other repositories
- _, err = sess.In("dependency_id", issueIDs).Delete(&IssueDependency{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&IssueUser{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&Reaction{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&IssueWatch{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&Stopwatch{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&TrackedTime{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&project_model.ProjectIssue{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("dependent_issue_id", issueIDs).Delete(&Comment{})
- if err != nil {
- return nil, err
- }
-
- var attachments []*repo_model.Attachment
- err = sess.In("issue_id", issueIDs).Find(&attachments)
- if err != nil {
- return nil, err
- }
-
- for j := range attachments {
- attachmentPaths = append(attachmentPaths, attachments[j].RelativePath())
- }
-
- _, err = sess.In("issue_id", issueIDs).Delete(&repo_model.Attachment{})
- if err != nil {
- return nil, err
- }
-
- _, err = sess.In("id", issueIDs).Delete(&Issue{})
- if err != nil {
- return nil, err
- }
- }
-
- return attachmentPaths, err
-}
-
-// DeleteOrphanedIssues delete issues without a repo
-func DeleteOrphanedIssues(ctx context.Context) error {
- var attachmentPaths []string
- err := db.WithTx(ctx, func(ctx context.Context) error {
- var ids []int64
-
- if err := db.GetEngine(ctx).Table("issue").Distinct("issue.repo_id").
- Join("LEFT", "repository", "issue.repo_id=repository.id").
- Where(builder.IsNull{"repository.id"}).GroupBy("issue.repo_id").
- Find(&ids); err != nil {
- return err
- }
-
- for i := range ids {
- paths, err := DeleteIssuesByRepoID(ctx, ids[i])
- if err != nil {
- return err
- }
- attachmentPaths = append(attachmentPaths, paths...)
- }
-
- return nil
- })
- if err != nil {
- return err
- }
-
- // Remove issue attachment files.
- for i := range attachmentPaths {
- system_model.RemoveAllWithNotice(ctx, "Delete issue attachment", attachmentPaths[i])
+func GetOrphanedIssueRepoIDs(ctx context.Context) ([]int64, error) {
+ var repoIDs []int64
+ if err := db.GetEngine(ctx).Table("issue").Distinct("issue.repo_id").
+ Join("LEFT", "repository", "issue.repo_id=repository.id").
+ Where(builder.IsNull{"repository.id"}).
+ Find(&repoIDs); err != nil {
+ return nil, err
}
- return nil
+ return repoIDs, nil
}
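DeleteIssuesByRepoID and DeleteOrphanedIssues are removed from the model layer here; the model now only exposes GetOrphanedIssueRepoIDs, so the actual deletion is presumably driven by a caller outside this package (not shown in this diff). A hedged sketch of such a caller, with deleteIssuesByRepoID as a hypothetical stand-in for wherever that logic now lives:

```go
package example

import "context"

// getOrphanedIssueRepoIDs stands in for issues_model.GetOrphanedIssueRepoIDs added above.
func getOrphanedIssueRepoIDs(ctx context.Context) ([]int64, error) { panic("sketch only") }

// deleteIssuesByRepoID is a hypothetical caller-side routine; the old
// model-layer implementation is removed by this patch.
func deleteIssuesByRepoID(ctx context.Context, repoID int64) error { panic("sketch only") }

// cleanOrphanedIssues sketches the split of responsibilities: the model only
// reports which repo IDs are orphaned, and the deletion is driven from outside.
func cleanOrphanedIssues(ctx context.Context) error {
	repoIDs, err := getOrphanedIssueRepoIDs(ctx)
	if err != nil {
		return err
	}
	for _, repoID := range repoIDs {
		if err := deleteIssuesByRepoID(ctx, repoID); err != nil {
			return err
		}
	}
	return nil
}
```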
diff --git a/models/issues/issue_xref.go b/models/issues/issue_xref.go
index e2e35859df..f8495929cf 100644
--- a/models/issues/issue_xref.go
+++ b/models/issues/issue_xref.go
@@ -235,7 +235,7 @@ func (issue *Issue) verifyReferencedIssue(stdCtx context.Context, ctx *crossRefe
// AddCrossReferences add cross references
func (c *Comment) AddCrossReferences(stdCtx context.Context, doer *user_model.User, removeOld bool) error {
- if c.Type != CommentTypeCode && c.Type != CommentTypeComment {
+ if !c.Type.HasContentSupport() {
return nil
}
if err := c.LoadIssue(stdCtx); err != nil {
diff --git a/models/issues/label.go b/models/issues/label.go
index 8a5d9321cc..25d6f1303e 100644
--- a/models/issues/label.go
+++ b/models/issues/label.go
@@ -87,6 +87,7 @@ type Label struct {
OrgID int64 `xorm:"INDEX"`
Name string
Exclusive bool
+ ExclusiveOrder int `xorm:"DEFAULT 0"` // 0 means no exclusive order
Description string
Color string `xorm:"VARCHAR(7)"`
NumIssues int
@@ -208,24 +209,20 @@ func NewLabel(ctx context.Context, l *Label) error {
// NewLabels creates new labels
func NewLabels(ctx context.Context, labels ...*Label) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- for _, l := range labels {
- color, err := label.NormalizeColor(l.Color)
- if err != nil {
- return err
- }
- l.Color = color
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, l := range labels {
+ color, err := label.NormalizeColor(l.Color)
+ if err != nil {
+ return err
+ }
+ l.Color = color
- if err := db.Insert(ctx, l); err != nil {
- return err
+ if err := db.Insert(ctx, l); err != nil {
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
// UpdateLabel updates label information.
@@ -236,7 +233,7 @@ func UpdateLabel(ctx context.Context, l *Label) error {
}
l.Color = color
- return updateLabelCols(ctx, l, "name", "description", "color", "exclusive", "archived_unix")
+ return updateLabelCols(ctx, l, "name", "description", "color", "exclusive", "exclusive_order", "archived_unix")
}
// DeleteLabel delete a label
@@ -249,35 +246,26 @@ func DeleteLabel(ctx context.Context, id, labelID int64) error {
return err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- sess := db.GetEngine(ctx)
-
- if l.BelongsToOrg() && l.OrgID != id {
- return nil
- }
- if l.BelongsToRepo() && l.RepoID != id {
- return nil
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if l.BelongsToOrg() && l.OrgID != id {
+ return nil
+ }
+ if l.BelongsToRepo() && l.RepoID != id {
+ return nil
+ }
- if _, err = db.DeleteByID[Label](ctx, labelID); err != nil {
- return err
- } else if _, err = sess.
- Where("label_id = ?", labelID).
- Delete(new(IssueLabel)); err != nil {
- return err
- }
+ if _, err = db.DeleteByID[Label](ctx, labelID); err != nil {
+ return err
+ } else if _, err = db.GetEngine(ctx).
+ Where("label_id = ?", labelID).
+ Delete(new(IssueLabel)); err != nil {
+ return err
+ }
- // delete comments about now deleted label_id
- if _, err = sess.Where("label_id = ?", labelID).Cols("label_id").Delete(&Comment{}); err != nil {
+ // delete comments about now deleted label_id
+ _, err = db.GetEngine(ctx).Where("label_id = ?", labelID).Cols("label_id").Delete(&Comment{})
return err
- }
-
- return committer.Commit()
+ })
}
// GetLabelByID returns a label by given ID.
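
NewLabels and DeleteLabel above, like several functions in milestone.go, pull.go and review.go further down, trade the manual db.TxContext / committer.Commit pairing for the db.WithTx callback helper. A condensed before/after sketch of the pattern, assuming Gitea's models/db package; the Label argument is only a placeholder.

// Assumed context: inside package issues, importing "context" and
// "code.gitea.io/gitea/models/db".

// Before: explicit transaction management; every early return relies on the
// deferred committer.Close() to roll back.
func createLabelOld(ctx context.Context, l *Label) error {
	ctx, committer, err := db.TxContext(ctx)
	if err != nil {
		return err
	}
	defer committer.Close()
	if err := db.Insert(ctx, l); err != nil {
		return err
	}
	return committer.Commit()
}

// After: db.WithTx commits when the callback returns nil and rolls back when
// it returns an error.
func createLabelNew(ctx context.Context, l *Label) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		return db.Insert(ctx, l)
	})
}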
diff --git a/models/issues/label_test.go b/models/issues/label_test.go
index 185fa11bbc..226036d543 100644
--- a/models/issues/label_test.go
+++ b/models/issues/label_test.go
@@ -20,7 +20,7 @@ func TestLabel_CalOpenIssues(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
label.CalOpenIssues()
- assert.EqualValues(t, 2, label.NumOpenIssues)
+ assert.Equal(t, 2, label.NumOpenIssues)
}
func TestLabel_LoadSelectedLabelsAfterClick(t *testing.T) {
@@ -154,7 +154,7 @@ func TestGetLabelsByRepoID(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, labels, len(expectedIssueIDs))
for i, label := range labels {
- assert.EqualValues(t, expectedIssueIDs[i], label.ID)
+ assert.Equal(t, expectedIssueIDs[i], label.ID)
}
}
testSuccess(1, "leastissues", []int64{2, 1})
@@ -221,7 +221,7 @@ func TestGetLabelsByOrgID(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, labels, len(expectedIssueIDs))
for i, label := range labels {
- assert.EqualValues(t, expectedIssueIDs[i], label.ID)
+ assert.Equal(t, expectedIssueIDs[i], label.ID)
}
}
testSuccess(3, "leastissues", []int64{3, 4})
@@ -267,10 +267,10 @@ func TestUpdateLabel(t *testing.T) {
label.Name = update.Name
assert.NoError(t, issues_model.UpdateLabel(db.DefaultContext, update))
newLabel := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
- assert.EqualValues(t, label.ID, newLabel.ID)
- assert.EqualValues(t, label.Color, newLabel.Color)
- assert.EqualValues(t, label.Name, newLabel.Name)
- assert.EqualValues(t, label.Description, newLabel.Description)
+ assert.Equal(t, label.ID, newLabel.ID)
+ assert.Equal(t, label.Color, newLabel.Color)
+ assert.Equal(t, label.Name, newLabel.Name)
+ assert.Equal(t, label.Description, newLabel.Description)
assert.EqualValues(t, 0, newLabel.ArchivedUnix)
unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
}
@@ -313,7 +313,7 @@ func TestNewIssueLabel(t *testing.T) {
Content: "1",
})
label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
- assert.EqualValues(t, prevNumIssues+1, label.NumIssues)
+ assert.Equal(t, prevNumIssues+1, label.NumIssues)
// re-add existing IssueLabel
assert.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, label, doer))
@@ -366,11 +366,11 @@ func TestNewIssueLabels(t *testing.T) {
})
unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label1.ID})
label1 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
- assert.EqualValues(t, 3, label1.NumIssues)
- assert.EqualValues(t, 1, label1.NumClosedIssues)
+ assert.Equal(t, 3, label1.NumIssues)
+ assert.Equal(t, 1, label1.NumClosedIssues)
label2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
- assert.EqualValues(t, 1, label2.NumIssues)
- assert.EqualValues(t, 1, label2.NumClosedIssues)
+ assert.Equal(t, 1, label2.NumIssues)
+ assert.Equal(t, 1, label2.NumClosedIssues)
// corner case: test empty slice
assert.NoError(t, issues_model.NewIssueLabels(db.DefaultContext, issue, []*issues_model.Label{}, doer))
@@ -408,8 +408,8 @@ func TestDeleteIssueLabel(t *testing.T) {
LabelID: labelID,
}, `content=''`)
label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID})
- assert.EqualValues(t, expectedNumIssues, label.NumIssues)
- assert.EqualValues(t, expectedNumClosedIssues, label.NumClosedIssues)
+ assert.Equal(t, expectedNumIssues, label.NumIssues)
+ assert.Equal(t, expectedNumClosedIssues, label.NumClosedIssues)
}
testSuccess(1, 1, 2)
testSuccess(2, 5, 2)
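
The test hunks in this section repeatedly tighten assert.EqualValues to assert.Equal where both operands already share a type. The difference, in a standalone testify example:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestEqualVsEqualValues(t *testing.T) {
	var got int64 = 2
	// EqualValues converts before comparing, so a plain int literal passes.
	assert.EqualValues(t, 2, got)
	// Equal compares without conversion, so the expected value must carry the
	// same type; int(2) would fail against int64(2).
	assert.Equal(t, int64(2), got)
}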
diff --git a/models/issues/milestone.go b/models/issues/milestone.go
index 4c9bae58f7..373f39f4ff 100644
--- a/models/issues/milestone.go
+++ b/models/issues/milestone.go
@@ -105,22 +105,16 @@ func (m *Milestone) State() api.StateType {
// NewMilestone creates new milestone of repository.
func NewMilestone(ctx context.Context, m *Milestone) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ m.Name = strings.TrimSpace(m.Name)
- m.Name = strings.TrimSpace(m.Name)
-
- if err = db.Insert(ctx, m); err != nil {
- return err
- }
+ if err = db.Insert(ctx, m); err != nil {
+ return err
+ }
- if _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + 1 WHERE id = ?", m.RepoID); err != nil {
+ _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + 1 WHERE id = ?", m.RepoID)
return err
- }
- return committer.Commit()
+ })
}
// HasMilestoneByRepoID returns if the milestone exists in the repository.
@@ -155,28 +149,23 @@ func GetMilestoneByRepoIDANDName(ctx context.Context, repoID int64, name string)
// UpdateMilestone updates information of given milestone.
func UpdateMilestone(ctx context.Context, m *Milestone, oldIsClosed bool) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if m.IsClosed && !oldIsClosed {
- m.ClosedDateUnix = timeutil.TimeStampNow()
- }
-
- if err := updateMilestone(ctx, m); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if m.IsClosed && !oldIsClosed {
+ m.ClosedDateUnix = timeutil.TimeStampNow()
+ }
- // if IsClosed changed, update milestone numbers of repository
- if oldIsClosed != m.IsClosed {
- if err := updateRepoMilestoneNum(ctx, m.RepoID); err != nil {
+ if err := updateMilestone(ctx, m); err != nil {
return err
}
- }
- return committer.Commit()
+ // if IsClosed changed, update milestone numbers of repository
+ if oldIsClosed != m.IsClosed {
+ if err := updateRepoMilestoneNum(ctx, m.RepoID); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
}
func updateMilestone(ctx context.Context, m *Milestone) error {
@@ -213,44 +202,28 @@ func UpdateMilestoneCounters(ctx context.Context, id int64) error {
// ChangeMilestoneStatusByRepoIDAndID changes a milestone open/closed status if the milestone ID is in the repo.
func ChangeMilestoneStatusByRepoIDAndID(ctx context.Context, repoID, milestoneID int64, isClosed bool) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- m := &Milestone{
- ID: milestoneID,
- RepoID: repoID,
- }
-
- has, err := db.GetEngine(ctx).ID(milestoneID).Where("repo_id = ?", repoID).Get(m)
- if err != nil {
- return err
- } else if !has {
- return ErrMilestoneNotExist{ID: milestoneID, RepoID: repoID}
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ m := &Milestone{
+ ID: milestoneID,
+ RepoID: repoID,
+ }
- if err := changeMilestoneStatus(ctx, m, isClosed); err != nil {
- return err
- }
+ has, err := db.GetEngine(ctx).ID(milestoneID).Where("repo_id = ?", repoID).Get(m)
+ if err != nil {
+ return err
+ } else if !has {
+ return ErrMilestoneNotExist{ID: milestoneID, RepoID: repoID}
+ }
- return committer.Commit()
+ return changeMilestoneStatus(ctx, m, isClosed)
+ })
}
// ChangeMilestoneStatus changes the milestone open/closed status.
func ChangeMilestoneStatus(ctx context.Context, m *Milestone, isClosed bool) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err := changeMilestoneStatus(ctx, m, isClosed); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ return changeMilestoneStatus(ctx, m, isClosed)
+ })
}
func changeMilestoneStatus(ctx context.Context, m *Milestone, isClosed bool) error {
@@ -284,40 +257,34 @@ func DeleteMilestoneByRepoID(ctx context.Context, repoID, id int64) error {
return err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if _, err = db.DeleteByID[Milestone](ctx, m.ID); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if _, err = db.DeleteByID[Milestone](ctx, m.ID); err != nil {
+ return err
+ }
- numMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
- RepoID: repo.ID,
- })
- if err != nil {
- return err
- }
- numClosedMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
- RepoID: repo.ID,
- IsClosed: optional.Some(true),
- })
- if err != nil {
- return err
- }
- repo.NumMilestones = int(numMilestones)
- repo.NumClosedMilestones = int(numClosedMilestones)
+ numMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
+ RepoID: repo.ID,
+ })
+ if err != nil {
+ return err
+ }
+ numClosedMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ })
+ if err != nil {
+ return err
+ }
+ repo.NumMilestones = int(numMilestones)
+ repo.NumClosedMilestones = int(numClosedMilestones)
- if _, err = db.GetEngine(ctx).ID(repo.ID).Cols("num_milestones, num_closed_milestones").Update(repo); err != nil {
- return err
- }
+ if _, err = db.GetEngine(ctx).ID(repo.ID).Cols("num_milestones, num_closed_milestones").Update(repo); err != nil {
+ return err
+ }
- if _, err = db.Exec(ctx, "UPDATE `issue` SET milestone_id = 0 WHERE milestone_id = ?", m.ID); err != nil {
+ _, err = db.Exec(ctx, "UPDATE `issue` SET milestone_id = 0 WHERE milestone_id = ?", m.ID)
return err
- }
- return committer.Commit()
+ })
}
func updateRepoMilestoneNum(ctx context.Context, repoID int64) error {
@@ -360,22 +327,15 @@ func InsertMilestones(ctx context.Context, ms ...*Milestone) (err error) {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
-
- // to return the id, so we should not use batch insert
- for _, m := range ms {
- if _, err = sess.NoAutoTime().Insert(m); err != nil {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // insert one by one to get the IDs back; batch insert does not return them
+ for _, m := range ms {
+ if _, err = db.GetEngine(ctx).NoAutoTime().Insert(m); err != nil {
+ return err
+ }
}
- }
- if _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + ? WHERE id = ?", len(ms), ms[0].RepoID); err != nil {
+ _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + ? WHERE id = ?", len(ms), ms[0].RepoID)
return err
- }
- return committer.Commit()
+ })
}
diff --git a/models/issues/milestone_test.go b/models/issues/milestone_test.go
index 28cd0c028b..f73355c27d 100644
--- a/models/issues/milestone_test.go
+++ b/models/issues/milestone_test.go
@@ -69,7 +69,7 @@ func TestGetMilestonesByRepoID(t *testing.T) {
assert.Len(t, milestones, n)
for _, milestone := range milestones {
- assert.EqualValues(t, repoID, milestone.RepoID)
+ assert.Equal(t, repoID, milestone.RepoID)
}
}
test(1, api.StateOpen)
@@ -327,7 +327,7 @@ func TestUpdateMilestone(t *testing.T) {
milestone.Content = "newMilestoneContent"
assert.NoError(t, issues_model.UpdateMilestone(db.DefaultContext, milestone, milestone.IsClosed))
milestone = unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
- assert.EqualValues(t, "newMilestoneName", milestone.Name)
+ assert.Equal(t, "newMilestoneName", milestone.Name)
unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
}
@@ -364,7 +364,7 @@ func TestMigrate_InsertMilestones(t *testing.T) {
assert.NoError(t, err)
unittest.AssertExistsAndLoadBean(t, ms)
repoModified := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID})
- assert.EqualValues(t, repo.NumMilestones+1, repoModified.NumMilestones)
+ assert.Equal(t, repo.NumMilestones+1, repoModified.NumMilestones)
unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
}
diff --git a/models/issues/pull.go b/models/issues/pull.go
index e3af00224d..00d7bfe1ca 100644
--- a/models/issues/pull.go
+++ b/models/issues/pull.go
@@ -6,10 +6,10 @@ package issues
import (
"context"
+ "errors"
"fmt"
"io"
"regexp"
- "strconv"
"strings"
"code.gitea.io/gitea/models/db"
@@ -103,27 +103,6 @@ const (
PullRequestStatusAncestor
)
-func (status PullRequestStatus) String() string {
- switch status {
- case PullRequestStatusConflict:
- return "CONFLICT"
- case PullRequestStatusChecking:
- return "CHECKING"
- case PullRequestStatusMergeable:
- return "MERGEABLE"
- case PullRequestStatusManuallyMerged:
- return "MANUALLY_MERGED"
- case PullRequestStatusError:
- return "ERROR"
- case PullRequestStatusEmpty:
- return "EMPTY"
- case PullRequestStatusAncestor:
- return "ANCESTOR"
- default:
- return strconv.Itoa(int(status))
- }
-}
-
// PullRequestFlow the flow of pull request
type PullRequestFlow int
@@ -385,17 +364,10 @@ func (pr *PullRequest) GetApprovers(ctx context.Context) string {
func (pr *PullRequest) getReviewedByLines(ctx context.Context, writer io.Writer) error {
maxReviewers := setting.Repository.PullRequest.DefaultMergeMessageMaxApprovers
-
if maxReviewers == 0 {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
// Note: This doesn't page as we only expect a very limited number of reviews
reviews, err := FindLatestReviews(ctx, FindReviewOptions{
Types: []ReviewType{ReviewTypeApprove},
@@ -431,11 +403,11 @@ func (pr *PullRequest) getReviewedByLines(ctx context.Context, writer io.Writer)
}
reviewersWritten++
}
- return committer.Commit()
+ return nil
}
-// GetGitRefName returns git ref for hidden pull request branch
-func (pr *PullRequest) GetGitRefName() string {
+// GetGitHeadRefName returns the git ref name of the hidden pull request head branch
+func (pr *PullRequest) GetGitHeadRefName() string {
return fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index)
}
@@ -485,45 +457,36 @@ func (pr *PullRequest) IsFromFork() bool {
// NewPullRequest creates new pull request with labels for repository.
func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *Issue, labelIDs []int64, uuids []string, pr *PullRequest) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- idx, err := db.GetNextResourceIndex(ctx, "issue_index", repo.ID)
- if err != nil {
- return fmt.Errorf("generate pull request index failed: %w", err)
- }
-
- issue.Index = idx
- issue.Title = util.EllipsisDisplayString(issue.Title, 255)
-
- if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
- Repo: repo,
- Issue: issue,
- LabelIDs: labelIDs,
- Attachments: uuids,
- IsPull: true,
- }); err != nil {
- if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) || IsErrNewIssueInsert(err) {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", repo.ID)
+ if err != nil {
+ return fmt.Errorf("generate pull request index failed: %w", err)
}
- return fmt.Errorf("newIssue: %w", err)
- }
-
- pr.Index = issue.Index
- pr.BaseRepo = repo
- pr.IssueID = issue.ID
- if err = db.Insert(ctx, pr); err != nil {
- return fmt.Errorf("insert pull repo: %w", err)
- }
- if err = committer.Commit(); err != nil {
- return fmt.Errorf("Commit: %w", err)
- }
+ issue.Index = idx
+ issue.Title = util.EllipsisDisplayString(issue.Title, 255)
+
+ if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
+ Repo: repo,
+ Issue: issue,
+ LabelIDs: labelIDs,
+ Attachments: uuids,
+ IsPull: true,
+ }); err != nil {
+ if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) || IsErrNewIssueInsert(err) {
+ return err
+ }
+ return fmt.Errorf("newIssue: %w", err)
+ }
- return nil
+ pr.Index = issue.Index
+ pr.BaseRepo = repo
+ pr.IssueID = issue.ID
+ if err = db.Insert(ctx, pr); err != nil {
+ return fmt.Errorf("insert pull repo: %w", err)
+ }
+ return nil
+ })
}
// ErrUserMustCollaborator represents an error that the user must be a collaborator to a given repo.
@@ -670,12 +633,6 @@ func GetAllUnmergedAgitPullRequestByPoster(ctx context.Context, uid int64) ([]*P
return pulls, err
}
-// Update updates all fields of pull request.
-func (pr *PullRequest) Update(ctx context.Context) error {
- _, err := db.GetEngine(ctx).ID(pr.ID).AllCols().Update(pr)
- return err
-}
-
// UpdateCols updates specific fields of pull request.
func (pr *PullRequest) UpdateCols(ctx context.Context, cols ...string) error {
_, err := db.GetEngine(ctx).ID(pr.ID).Cols(cols...).Update(pr)
@@ -732,7 +689,7 @@ func (pr *PullRequest) GetWorkInProgressPrefix(ctx context.Context) string {
// UpdateCommitDivergence update Divergence of a pull request
func (pr *PullRequest) UpdateCommitDivergence(ctx context.Context, ahead, behind int) error {
if pr.ID == 0 {
- return fmt.Errorf("pull ID is 0")
+ return errors.New("pull ID is 0")
}
pr.CommitsAhead = ahead
pr.CommitsBehind = behind
@@ -925,7 +882,7 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule,
if strings.Contains(user, "/") {
s := strings.Split(user, "/")
if len(s) != 2 {
- warnings = append(warnings, fmt.Sprintf("incorrect codeowner group: %s", user))
+ warnings = append(warnings, "incorrect codeowner group: "+user)
continue
}
orgName := s[0]
@@ -933,12 +890,12 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule,
org, err := org_model.GetOrgByName(ctx, orgName)
if err != nil {
- warnings = append(warnings, fmt.Sprintf("incorrect codeowner organization: %s", user))
+ warnings = append(warnings, "incorrect codeowner organization: "+user)
continue
}
teams, err := org.LoadTeams(ctx)
if err != nil {
- warnings = append(warnings, fmt.Sprintf("incorrect codeowner team: %s", user))
+ warnings = append(warnings, "incorrect codeowner team: "+user)
continue
}
@@ -950,7 +907,7 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule,
} else {
u, err := user_model.GetUserByName(ctx, user)
if err != nil {
- warnings = append(warnings, fmt.Sprintf("incorrect codeowner user: %s", user))
+ warnings = append(warnings, "incorrect codeowner user: "+user)
continue
}
rule.Users = append(rule.Users, u)
@@ -1004,22 +961,18 @@ func TokenizeCodeOwnersLine(line string) []string {
// InsertPullRequests inserted pull requests
func InsertPullRequests(ctx context.Context, prs ...*PullRequest) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
- for _, pr := range prs {
- if err := insertIssue(ctx, pr.Issue); err != nil {
- return err
- }
- pr.IssueID = pr.Issue.ID
- if _, err := sess.NoAutoTime().Insert(pr); err != nil {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, pr := range prs {
+ if err := insertIssue(ctx, pr.Issue); err != nil {
+ return err
+ }
+ pr.IssueID = pr.Issue.ID
+ if _, err := db.GetEngine(ctx).NoAutoTime().Insert(pr); err != nil {
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
// GetPullRequestByMergedCommit returns a merged pull request by the given commit
diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go
index b685175f8e..84f9f6166d 100644
--- a/models/issues/pull_list.go
+++ b/models/issues/pull_list.go
@@ -152,7 +152,8 @@ func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptio
applySorts(findSession, opts.SortType, 0)
findSession = db.SetSessionPagination(findSession, opts)
prs := make([]*PullRequest, 0, opts.PageSize)
- return prs, maxResults, findSession.Find(&prs)
+ found := findSession.Find(&prs)
+ return prs, maxResults, found
}
// PullRequestList defines a list of pull requests
diff --git a/models/issues/pull_list_test.go b/models/issues/pull_list_test.go
index f5553e7885..eb2de006d6 100644
--- a/models/issues/pull_list_test.go
+++ b/models/issues/pull_list_test.go
@@ -40,7 +40,7 @@ func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, reviewComments, 2)
for _, pr := range prs {
- assert.EqualValues(t, 1, reviewComments[pr.IssueID])
+ assert.Equal(t, 1, reviewComments[pr.IssueID])
}
}
diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go
index 090659864a..39efaa5792 100644
--- a/models/issues/pull_test.go
+++ b/models/issues/pull_test.go
@@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestPullRequest_LoadAttributes(t *testing.T) {
@@ -76,6 +77,47 @@ func TestPullRequestsNewest(t *testing.T) {
}
}
+func TestPullRequests_Closed_RecentSortType(t *testing.T) {
+ // Issue ID | Closed At | Updated At
+ // 2 | 1707270001 | 1707270001
+ // 3 | 1707271000 | 1707279999
+ // 11 | 1707279999 | 1707275555
+ tests := []struct {
+ sortType string
+ expectedIssueIDOrder []int64
+ }{
+ {"recentupdate", []int64{3, 11, 2}},
+ {"recentclose", []int64{11, 3, 2}},
+ }
+
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ _, err := db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707270001, updated_unix = 1707270001, is_closed = true WHERE id = 2")
+ require.NoError(t, err)
+ _, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707271000, updated_unix = 1707279999, is_closed = true WHERE id = 3")
+ require.NoError(t, err)
+ _, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707279999, updated_unix = 1707275555, is_closed = true WHERE id = 11")
+ require.NoError(t, err)
+
+ for _, test := range tests {
+ t.Run(test.sortType, func(t *testing.T) {
+ prs, _, err := issues_model.PullRequests(db.DefaultContext, 1, &issues_model.PullRequestsOptions{
+ ListOptions: db.ListOptions{
+ Page: 1,
+ },
+ State: "closed",
+ SortType: test.sortType,
+ })
+ require.NoError(t, err)
+
+ if assert.Len(t, prs, len(test.expectedIssueIDOrder)) {
+ for i := range test.expectedIssueIDOrder {
+ assert.Equal(t, test.expectedIssueIDOrder[i], prs[i].IssueID)
+ }
+ }
+ })
+ }
+}
+
func TestLoadRequestedReviewers(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
@@ -206,19 +248,6 @@ func TestGetPullRequestByIssueID(t *testing.T) {
assert.True(t, issues_model.IsErrPullRequestNotExist(err))
}
-func TestPullRequest_Update(t *testing.T) {
- assert.NoError(t, unittest.PrepareTestDatabase())
- pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
- pr.BaseBranch = "baseBranch"
- pr.HeadBranch = "headBranch"
- pr.Update(db.DefaultContext)
-
- pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
- assert.Equal(t, "baseBranch", pr.BaseBranch)
- assert.Equal(t, "headBranch", pr.HeadBranch)
- unittest.CheckConsistencyFor(t, pr)
-}
-
func TestPullRequest_UpdateCols(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
pr := &issues_model.PullRequest{
@@ -285,7 +314,7 @@ func TestDeleteOrphanedObjects(t *testing.T) {
countAfter, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{})
assert.NoError(t, err)
- assert.EqualValues(t, countBefore, countAfter)
+ assert.Equal(t, countBefore, countAfter)
}
func TestParseCodeOwnersLine(t *testing.T) {
@@ -318,7 +347,7 @@ func TestGetApprovers(t *testing.T) {
setting.Repository.PullRequest.DefaultMergeMessageOfficialApproversOnly = false
approvers := pr.GetApprovers(db.DefaultContext)
expected := "Reviewed-by: User Five <user5@example.com>\nReviewed-by: Org Six <org6@example.com>\n"
- assert.EqualValues(t, expected, approvers)
+ assert.Equal(t, expected, approvers)
}
func TestGetPullRequestByMergedCommit(t *testing.T) {
diff --git a/models/issues/reaction.go b/models/issues/reaction.go
index f24001fd23..3b5ad6d7ab 100644
--- a/models/issues/reaction.go
+++ b/models/issues/reaction.go
@@ -224,21 +224,9 @@ func CreateReaction(ctx context.Context, opts *ReactionOptions) (*Reaction, erro
return nil, ErrForbiddenIssueReaction{opts.Type}
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- reaction, err := createReaction(ctx, opts)
- if err != nil {
- return reaction, err
- }
-
- if err := committer.Commit(); err != nil {
- return nil, err
- }
- return reaction, nil
+ return db.WithTx2(ctx, func(ctx context.Context) (*Reaction, error) {
+ return createReaction(ctx, opts)
+ })
}
// DeleteReaction deletes reaction for issue or comment.
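
CreateReaction above (and CreateReview, AddReviewRequest, RemoveReviewRequest below) move to db.WithTx2, the value-returning variant of WithTx. A minimal sketch, assuming a generic signature along the lines of WithTx2[T](ctx, func(ctx) (T, error)) (T, error); the Reaction fields are placeholders.

// Assumed context: inside package issues, importing "context" and
// "code.gitea.io/gitea/models/db".

// Sketch: a value-returning operation wrapped in a transaction via db.WithTx2.
// Returning an error rolls back; returning nil commits.
func createDemoReaction(ctx context.Context, issueID, userID int64) (*Reaction, error) {
	return db.WithTx2(ctx, func(ctx context.Context) (*Reaction, error) {
		r := &Reaction{Type: "+1", IssueID: issueID, UserID: userID}
		if err := db.Insert(ctx, r); err != nil {
			return nil, err
		}
		return r, nil
	})
}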
diff --git a/models/issues/review.go b/models/issues/review.go
index 1c5c2ee30a..b758fa5ffa 100644
--- a/models/issues/review.go
+++ b/models/issues/review.go
@@ -5,6 +5,7 @@ package issues
import (
"context"
+ "errors"
"fmt"
"slices"
"strings"
@@ -333,54 +334,51 @@ func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organizatio
// CreateReview creates a new review based on opts
func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
-
- review := &Review{
- Issue: opts.Issue,
- IssueID: opts.Issue.ID,
- Reviewer: opts.Reviewer,
- ReviewerTeam: opts.ReviewerTeam,
- Content: opts.Content,
- Official: opts.Official,
- CommitID: opts.CommitID,
- Stale: opts.Stale,
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*Review, error) {
+ sess := db.GetEngine(ctx)
+
+ review := &Review{
+ Issue: opts.Issue,
+ IssueID: opts.Issue.ID,
+ Reviewer: opts.Reviewer,
+ ReviewerTeam: opts.ReviewerTeam,
+ Content: opts.Content,
+ Official: opts.Official,
+ CommitID: opts.CommitID,
+ Stale: opts.Stale,
+ }
- if opts.Reviewer != nil {
- review.Type = opts.Type
- review.ReviewerID = opts.Reviewer.ID
+ if opts.Reviewer != nil {
+ review.Type = opts.Type
+ review.ReviewerID = opts.Reviewer.ID
- reviewCond := builder.Eq{"reviewer_id": opts.Reviewer.ID, "issue_id": opts.Issue.ID}
- // make sure user review requests are cleared
- if opts.Type != ReviewTypePending {
- if _, err := sess.Where(reviewCond.And(builder.Eq{"type": ReviewTypeRequest})).Delete(new(Review)); err != nil {
- return nil, err
+ reviewCond := builder.Eq{"reviewer_id": opts.Reviewer.ID, "issue_id": opts.Issue.ID}
+ // make sure user review requests are cleared
+ if opts.Type != ReviewTypePending {
+ if _, err := sess.Where(reviewCond.And(builder.Eq{"type": ReviewTypeRequest})).Delete(new(Review)); err != nil {
+ return nil, err
+ }
}
- }
- // make sure if the created review gets dismissed no old review surface
- // other types can be ignored, as they don't affect branch protection
- if opts.Type == ReviewTypeApprove || opts.Type == ReviewTypeReject {
- if _, err := sess.Where(reviewCond.And(builder.In("type", ReviewTypeApprove, ReviewTypeReject))).
- Cols("dismissed").Update(&Review{Dismissed: true}); err != nil {
- return nil, err
+ // make sure that if the created review gets dismissed, no old review surfaces
+ // other types can be ignored, as they don't affect branch protection
+ if opts.Type == ReviewTypeApprove || opts.Type == ReviewTypeReject {
+ if _, err := sess.Where(reviewCond.And(builder.In("type", ReviewTypeApprove, ReviewTypeReject))).
+ Cols("dismissed").Update(&Review{Dismissed: true}); err != nil {
+ return nil, err
+ }
}
+ } else if opts.ReviewerTeam != nil {
+ review.Type = ReviewTypeRequest
+ review.ReviewerTeamID = opts.ReviewerTeam.ID
+ } else {
+ return nil, errors.New("provide either reviewer or reviewer team")
}
- } else if opts.ReviewerTeam != nil {
- review.Type = ReviewTypeRequest
- review.ReviewerTeamID = opts.ReviewerTeam.ID
- } else {
- return nil, fmt.Errorf("provide either reviewer or reviewer team")
- }
- if _, err := sess.Insert(review); err != nil {
- return nil, err
- }
- return review, committer.Commit()
+ if _, err := sess.Insert(review); err != nil {
+ return nil, err
+ }
+ return review, nil
+ })
}
// GetCurrentReview returns the current pending review of reviewer for given issue
@@ -604,168 +602,152 @@ func DismissReview(ctx context.Context, review *Review, isDismiss bool) (err err
// InsertReviews inserts review and review comments
func InsertReviews(ctx context.Context, reviews []*Review) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ sess := db.GetEngine(ctx)
- for _, review := range reviews {
- if _, err := sess.NoAutoTime().Insert(review); err != nil {
- return err
- }
+ for _, review := range reviews {
+ if _, err := sess.NoAutoTime().Insert(review); err != nil {
+ return err
+ }
- if _, err := sess.NoAutoTime().Insert(&Comment{
- Type: CommentTypeReview,
- Content: review.Content,
- PosterID: review.ReviewerID,
- OriginalAuthor: review.OriginalAuthor,
- OriginalAuthorID: review.OriginalAuthorID,
- IssueID: review.IssueID,
- ReviewID: review.ID,
- CreatedUnix: review.CreatedUnix,
- UpdatedUnix: review.UpdatedUnix,
- }); err != nil {
- return err
- }
+ if _, err := sess.NoAutoTime().Insert(&Comment{
+ Type: CommentTypeReview,
+ Content: review.Content,
+ PosterID: review.ReviewerID,
+ OriginalAuthor: review.OriginalAuthor,
+ OriginalAuthorID: review.OriginalAuthorID,
+ IssueID: review.IssueID,
+ ReviewID: review.ID,
+ CreatedUnix: review.CreatedUnix,
+ UpdatedUnix: review.UpdatedUnix,
+ }); err != nil {
+ return err
+ }
- for _, c := range review.Comments {
- c.ReviewID = review.ID
- }
+ for _, c := range review.Comments {
+ c.ReviewID = review.ID
+ }
- if len(review.Comments) > 0 {
- if _, err := sess.NoAutoTime().Insert(review.Comments); err != nil {
- return err
+ if len(review.Comments) > 0 {
+ if _, err := sess.NoAutoTime().Insert(review.Comments); err != nil {
+ return err
+ }
}
- }
- if err := UpdateIssueNumComments(ctx, review.IssueID); err != nil {
- return err
+ if err := UpdateIssueNumComments(ctx, review.IssueID); err != nil {
+ return err
+ }
}
- }
-
- return committer.Commit()
+ return nil
+ })
}
// AddReviewRequest add a review request from one reviewer
func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ sess := db.GetEngine(ctx)
- review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
- if err != nil && !IsErrReviewNotExist(err) {
- return nil, err
- }
-
- if review != nil {
- // skip it when reviewer hase been request to review
- if review.Type == ReviewTypeRequest {
- return nil, committer.Commit() // still commit the transaction, or committer.Close() will rollback it, even if it's a reused transaction.
- }
-
- if issue.IsClosed {
- return nil, ErrReviewRequestOnClosedPR{}
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
}
- if issue.IsPull {
- if err := issue.LoadPullRequest(ctx); err != nil {
- return nil, err
+ if review != nil {
+ // skip it when the reviewer has already been requested to review
+ if review.Type == ReviewTypeRequest {
+ return nil, nil // already requested; return nil so the (possibly reused) transaction is still committed instead of rolled back
}
- if issue.PullRequest.HasMerged {
+
+ if issue.IsClosed {
return nil, ErrReviewRequestOnClosedPR{}
}
+
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return nil, err
+ }
+ if issue.PullRequest.HasMerged {
+ return nil, ErrReviewRequestOnClosedPR{}
+ }
+ }
}
- }
- // if the reviewer is an official reviewer,
- // remove the official flag in the all previous reviews
- official, err := IsOfficialReviewer(ctx, issue, reviewer)
- if err != nil {
- return nil, err
- } else if official {
- if _, err := sess.Exec("UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_id=?", false, issue.ID, reviewer.ID); err != nil {
+ // if the reviewer is an official reviewer,
+ // remove the official flag in all previous reviews
+ official, err := IsOfficialReviewer(ctx, issue, reviewer)
+ if err != nil {
return nil, err
+ } else if official {
+ if _, err := sess.Exec("UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_id=?", false, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
}
- }
- review, err = CreateReview(ctx, CreateReviewOptions{
- Type: ReviewTypeRequest,
- Issue: issue,
- Reviewer: reviewer,
- Official: official,
- Stale: false,
- })
- if err != nil {
- return nil, err
- }
+ review, err = CreateReview(ctx, CreateReviewOptions{
+ Type: ReviewTypeRequest,
+ Issue: issue,
+ Reviewer: reviewer,
+ Official: official,
+ Stale: false,
+ })
+ if err != nil {
+ return nil, err
+ }
- comment, err := CreateComment(ctx, &CreateCommentOptions{
- Type: CommentTypeReviewRequest,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- RemovedAssignee: false, // Use RemovedAssignee as !isRequest
- AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
- ReviewID: review.ID,
- })
- if err != nil {
- return nil, err
- }
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: false, // Use RemovedAssignee as !isRequest
+ AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
+ ReviewID: review.ID,
+ })
+ if err != nil {
+ return nil, err
+ }
- // func caller use the created comment to retrieve created review too.
- comment.Review = review
+ // the caller can use the created comment to retrieve the created review too.
+ comment.Review = review
- return comment, committer.Commit()
+ return comment, nil
+ })
}
// RemoveReviewRequest remove a review request from one reviewer
func RemoveReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
- if err != nil && !IsErrReviewNotExist(err) {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
- if review == nil || review.Type != ReviewTypeRequest {
- return nil, nil
- }
+ if review == nil || review.Type != ReviewTypeRequest {
+ return nil, nil
+ }
- if _, err = db.DeleteByBean(ctx, review); err != nil {
- return nil, err
- }
+ if _, err = db.DeleteByBean(ctx, review); err != nil {
+ return nil, err
+ }
- official, err := IsOfficialReviewer(ctx, issue, reviewer)
- if err != nil {
- return nil, err
- } else if official {
- if err := restoreLatestOfficialReview(ctx, issue.ID, reviewer.ID); err != nil {
+ official, err := IsOfficialReviewer(ctx, issue, reviewer)
+ if err != nil {
return nil, err
+ } else if official {
+ if err := restoreLatestOfficialReview(ctx, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
}
- }
- comment, err := CreateComment(ctx, &CreateCommentOptions{
- Type: CommentTypeReviewRequest,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- RemovedAssignee: true, // Use RemovedAssignee as !isRequest
- AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
+ return CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: true, // Use RemovedAssignee as !isRequest
+ AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
+ })
})
- if err != nil {
- return nil, err
- }
-
- return comment, committer.Commit()
}
// Recalculate the latest official review for reviewer
@@ -786,120 +768,112 @@ func restoreLatestOfficialReview(ctx context.Context, issueID, reviewerID int64)
// AddTeamReviewRequest add a review request from one team
func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
- review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
- if err != nil && !IsErrReviewNotExist(err) {
- return nil, err
- }
+ // This team has already been requested to review - therefore skip this.
+ if review != nil {
+ return nil, nil
+ }
- // This team already has been requested to review - therefore skip this.
- if review != nil {
- return nil, nil
- }
+ official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
+ if err != nil {
+ return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
+ } else if !official {
+ if official, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
+ return nil, fmt.Errorf("isOfficialReviewer(): %w", err)
+ }
+ }
- official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
- if err != nil {
- return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
- } else if !official {
- if official, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
- return nil, fmt.Errorf("isOfficialReviewer(): %w", err)
+ if review, err = CreateReview(ctx, CreateReviewOptions{
+ Type: ReviewTypeRequest,
+ Issue: issue,
+ ReviewerTeam: reviewer,
+ Official: official,
+ Stale: false,
+ }); err != nil {
+ return nil, err
}
- }
- if review, err = CreateReview(ctx, CreateReviewOptions{
- Type: ReviewTypeRequest,
- Issue: issue,
- ReviewerTeam: reviewer,
- Official: official,
- Stale: false,
- }); err != nil {
- return nil, err
- }
+ if official {
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_team_id=?", false, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
+ }
- if official {
- if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_team_id=?", false, issue.ID, reviewer.ID); err != nil {
- return nil, err
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: false, // Use RemovedAssignee as !isRequest
+ AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
+ ReviewID: review.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("CreateComment(): %w", err)
}
- }
- comment, err := CreateComment(ctx, &CreateCommentOptions{
- Type: CommentTypeReviewRequest,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- RemovedAssignee: false, // Use RemovedAssignee as !isRequest
- AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
- ReviewID: review.ID,
+ return comment, nil
})
- if err != nil {
- return nil, fmt.Errorf("CreateComment(): %w", err)
- }
-
- return comment, committer.Commit()
}
// RemoveTeamReviewRequest remove a review request from one team
func RemoveTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
- if err != nil && !IsErrReviewNotExist(err) {
- return nil, err
- }
-
- if review == nil {
- return nil, nil
- }
-
- if _, err = db.DeleteByBean(ctx, review); err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
+ review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
- official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
- if err != nil {
- return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
- }
+ if review == nil {
+ return nil, nil
+ }
- if official {
- // recalculate which is the latest official review from that team
- review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, -reviewer.ID)
- if err != nil && !IsErrReviewNotExist(err) {
+ if _, err = db.DeleteByBean(ctx, review); err != nil {
return nil, err
}
- if review != nil {
- if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE id=?", true, review.ID); err != nil {
+ official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
+ if err != nil {
+ return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
+ }
+
+ if official {
+ // recalculate which is the latest official review from that team
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, -reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
return nil, err
}
+
+ if review != nil {
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE id=?", true, review.ID); err != nil {
+ return nil, err
+ }
+ }
}
- }
- if doer == nil {
- return nil, committer.Commit()
- }
+ if doer == nil {
+ return nil, nil
+ }
- comment, err := CreateComment(ctx, &CreateCommentOptions{
- Type: CommentTypeReviewRequest,
- Doer: doer,
- Repo: issue.Repo,
- Issue: issue,
- RemovedAssignee: true, // Use RemovedAssignee as !isRequest
- AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
- })
- if err != nil {
- return nil, fmt.Errorf("CreateComment(): %w", err)
- }
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: true, // Use RemovedAssignee as !isRequest
+ AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
+ })
+ if err != nil {
+ return nil, fmt.Errorf("CreateComment(): %w", err)
+ }
- return comment, committer.Commit()
+ return comment, nil
+ })
}
// MarkConversation Add or remove Conversation mark for a code comment
@@ -933,7 +907,7 @@ func MarkConversation(ctx context.Context, comment *Comment, doer *user_model.Us
// the PR writer , official reviewer and poster can do it
func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.User) (permResult bool, err error) {
if doer == nil || issue == nil {
- return false, fmt.Errorf("issue or doer is nil")
+ return false, errors.New("issue or doer is nil")
}
if err = issue.LoadRepo(ctx); err != nil {
@@ -965,61 +939,56 @@ func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.Use
// DeleteReview delete a review and it's code comments
func DeleteReview(ctx context.Context, r *Review) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if r.ID == 0 {
- return fmt.Errorf("review is not allowed to be 0")
- }
-
- if r.Type == ReviewTypeRequest {
- return fmt.Errorf("review request can not be deleted using this method")
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if r.ID == 0 {
+ return errors.New("review is not allowed to be 0")
+ }
- opts := FindCommentsOptions{
- Type: CommentTypeCode,
- IssueID: r.IssueID,
- ReviewID: r.ID,
- }
+ if r.Type == ReviewTypeRequest {
+ return errors.New("review request can not be deleted using this method")
+ }
- if _, err := db.Delete[Comment](ctx, opts); err != nil {
- return err
- }
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
- opts = FindCommentsOptions{
- Type: CommentTypeReview,
- IssueID: r.IssueID,
- ReviewID: r.ID,
- }
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
- if _, err := db.Delete[Comment](ctx, opts); err != nil {
- return err
- }
+ opts = FindCommentsOptions{
+ Type: CommentTypeReview,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
- opts = FindCommentsOptions{
- Type: CommentTypeDismissReview,
- IssueID: r.IssueID,
- ReviewID: r.ID,
- }
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
- if _, err := db.Delete[Comment](ctx, opts); err != nil {
- return err
- }
+ opts = FindCommentsOptions{
+ Type: CommentTypeDismissReview,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
- if _, err := db.DeleteByID[Review](ctx, r.ID); err != nil {
- return err
- }
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
- if r.Official {
- if err := restoreLatestOfficialReview(ctx, r.IssueID, r.ReviewerID); err != nil {
+ if _, err := db.DeleteByID[Review](ctx, r.ID); err != nil {
return err
}
- }
- return committer.Commit()
+ if r.Official {
+ if err := restoreLatestOfficialReview(ctx, r.IssueID, r.ReviewerID); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
}
// GetCodeCommentsCount return count of CodeComments a Review has
diff --git a/models/issues/review_list.go b/models/issues/review_list.go
index 928f24fb2d..bbb8c489fa 100644
--- a/models/issues/review_list.go
+++ b/models/issues/review_list.go
@@ -22,7 +22,7 @@ type ReviewList []*Review
// LoadReviewers loads reviewers
func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
reviewerIDs := make([]int64, len(reviews))
- for i := 0; i < len(reviews); i++ {
+ for i := range reviews {
reviewerIDs[i] = reviews[i].ReviewerID
}
reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIDs)
diff --git a/models/issues/stopwatch.go b/models/issues/stopwatch.go
index 7c05a3a883..761b8f91a0 100644
--- a/models/issues/stopwatch.go
+++ b/models/issues/stopwatch.go
@@ -5,7 +5,6 @@ package issues
import (
"context"
- "fmt"
"time"
"code.gitea.io/gitea/models/db"
@@ -15,20 +14,6 @@ import (
"code.gitea.io/gitea/modules/util"
)
-// ErrIssueStopwatchNotExist represents an error that stopwatch is not exist
-type ErrIssueStopwatchNotExist struct {
- UserID int64
- IssueID int64
-}
-
-func (err ErrIssueStopwatchNotExist) Error() string {
- return fmt.Sprintf("issue stopwatch doesn't exist[uid: %d, issue_id: %d", err.UserID, err.IssueID)
-}
-
-func (err ErrIssueStopwatchNotExist) Unwrap() error {
- return util.ErrNotExist
-}
-
// Stopwatch represents a stopwatch for time tracking.
type Stopwatch struct {
ID int64 `xorm:"pk autoincr"`
@@ -55,13 +40,11 @@ func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, ex
return sw, exists, err
}
-// UserIDCount is a simple coalition of UserID and Count
type UserStopwatch struct {
UserID int64
StopWatches []*Stopwatch
}
-// GetUIDsAndNotificationCounts between the two provided times
func GetUIDsAndStopwatch(ctx context.Context) ([]*UserStopwatch, error) {
sws := []*Stopwatch{}
if err := db.GetEngine(ctx).Where("issue_id != 0").Find(&sws); err != nil {
@@ -87,7 +70,7 @@ func GetUIDsAndStopwatch(ctx context.Context) ([]*UserStopwatch, error) {
return res, nil
}
-// GetUserStopwatches return list of all stopwatches of a user
+// GetUserStopwatches returns all stopwatches of the given user
func GetUserStopwatches(ctx context.Context, userID int64, listOptions db.ListOptions) ([]*Stopwatch, error) {
sws := make([]*Stopwatch, 0, 8)
sess := db.GetEngine(ctx).Where("stopwatch.user_id = ?", userID)
@@ -102,7 +85,7 @@ func GetUserStopwatches(ctx context.Context, userID int64, listOptions db.ListOp
return sws, nil
}
-// CountUserStopwatches return count of all stopwatches of a user
+// CountUserStopwatches returns the number of stopwatches of the given user
func CountUserStopwatches(ctx context.Context, userID int64) (int64, error) {
return db.GetEngine(ctx).Where("user_id = ?", userID).Count(&Stopwatch{})
}
@@ -136,43 +119,21 @@ func HasUserStopwatch(ctx context.Context, userID int64) (exists bool, sw *Stopw
return exists, sw, issue, err
}
-// FinishIssueStopwatchIfPossible if stopwatch exist then finish it otherwise ignore
-func FinishIssueStopwatchIfPossible(ctx context.Context, user *user_model.User, issue *Issue) error {
- _, exists, err := getStopwatch(ctx, user.ID, issue.ID)
- if err != nil {
- return err
- }
- if !exists {
- return nil
- }
- return FinishIssueStopwatch(ctx, user, issue)
-}
-
-// CreateOrStopIssueStopwatch create an issue stopwatch if it's not exist, otherwise finish it
-func CreateOrStopIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
- _, exists, err := getStopwatch(ctx, user.ID, issue.ID)
- if err != nil {
- return err
- }
- if exists {
- return FinishIssueStopwatch(ctx, user, issue)
- }
- return CreateIssueStopwatch(ctx, user, issue)
-}
-
-// FinishIssueStopwatch if stopwatch exist then finish it otherwise return an error
-func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+// FinishIssueStopwatch finishes the user's stopwatch on the issue if one exists, reporting whether it did.
+func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) {
sw, exists, err := getStopwatch(ctx, user.ID, issue.ID)
if err != nil {
- return err
+ return false, err
+ } else if !exists {
+ return false, nil
}
- if !exists {
- return ErrIssueStopwatchNotExist{
- UserID: user.ID,
- IssueID: issue.ID,
- }
+ if err = finishIssueStopwatch(ctx, user, issue, sw); err != nil {
+ return false, err
}
+ return true, nil
+}
+func finishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue, sw *Stopwatch) error {
// Create tracked time out of the time difference between start date and actual date
timediff := time.Now().Unix() - int64(sw.CreatedUnix)
@@ -184,14 +145,12 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss
Time: timediff,
}
- if err := db.Insert(ctx, tt); err != nil {
+ if err := issue.LoadRepo(ctx); err != nil {
return err
}
-
- if err := issue.LoadRepo(ctx); err != nil {
+ if err := db.Insert(ctx, tt); err != nil {
return err
}
-
if _, err := CreateComment(ctx, &CreateCommentOptions{
Doer: user,
Issue: issue,
@@ -202,83 +161,65 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss
}); err != nil {
return err
}
- _, err = db.DeleteByBean(ctx, sw)
+ _, err := db.DeleteByBean(ctx, sw)
return err
}
-// CreateIssueStopwatch creates a stopwatch if not exist, otherwise return an error
-func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
- if err := issue.LoadRepo(ctx); err != nil {
- return err
- }
-
- // if another stopwatch is running: stop it
- exists, _, otherIssue, err := HasUserStopwatch(ctx, user.ID)
- if err != nil {
- return err
- }
- if exists {
- if err := FinishIssueStopwatch(ctx, user, otherIssue); err != nil {
- return err
+// CreateIssueStopwatch creates a stopwatch if the issue doesn't have the user's stopwatch.
+// It also stops any other stopwatch that might be running for the user.
+func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) {
+ { // if another issue's stopwatch is running: stop it; if this issue already has a running stopwatch: do nothing (ok=false).
+ exists, otherStopWatch, otherIssue, err := HasUserStopwatch(ctx, user.ID)
+ if err != nil {
+ return false, err
+ }
+ if exists {
+ if otherStopWatch.IssueID == issue.ID {
+ // don't allow starting stopwatch for the same issue
+ return false, nil
+ }
+ // stop the other issue's stopwatch
+ if err = finishIssueStopwatch(ctx, user, otherIssue, otherStopWatch); err != nil {
+ return false, err
+ }
}
}
- // Create stopwatch
- sw := &Stopwatch{
- UserID: user.ID,
- IssueID: issue.ID,
+ if err = issue.LoadRepo(ctx); err != nil {
+ return false, err
}
-
- if err := db.Insert(ctx, sw); err != nil {
- return err
+ if err = db.Insert(ctx, &Stopwatch{UserID: user.ID, IssueID: issue.ID}); err != nil {
+ return false, err
}
-
- if err := issue.LoadRepo(ctx); err != nil {
- return err
- }
-
- if _, err := CreateComment(ctx, &CreateCommentOptions{
+ if _, err = CreateComment(ctx, &CreateCommentOptions{
Doer: user,
Issue: issue,
Repo: issue.Repo,
Type: CommentTypeStartTracking,
}); err != nil {
- return err
+ return false, err
}
-
- return nil
+ return true, nil
}
// CancelStopwatch removes the given stopwatch and logs it into issue's timeline.
-func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- if err := cancelStopwatch(ctx, user, issue); err != nil {
- return err
- }
- return committer.Commit()
-}
-
-func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
- e := db.GetEngine(ctx)
- sw, exists, err := getStopwatch(ctx, user.ID, issue.ID)
- if err != nil {
- return err
- }
-
- if exists {
- if _, err := e.Delete(sw); err != nil {
+func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) {
+ err = db.WithTx(ctx, func(ctx context.Context) error {
+ e := db.GetEngine(ctx)
+ sw, exists, err := getStopwatch(ctx, user.ID, issue.ID)
+ if err != nil {
return err
+ } else if !exists {
+ return nil
}
- if err := issue.LoadRepo(ctx); err != nil {
+ if err = issue.LoadRepo(ctx); err != nil {
return err
}
-
- if _, err := CreateComment(ctx, &CreateCommentOptions{
+ if _, err = e.Delete(sw); err != nil {
+ return err
+ }
+ if _, err = CreateComment(ctx, &CreateCommentOptions{
Doer: user,
Issue: issue,
Repo: issue.Repo,
@@ -286,6 +227,8 @@ func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) e
}); err != nil {
return err
}
- }
- return nil
+ ok = true
+ return nil
+ })
+ return ok, err
}
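
FinishIssueStopwatch, CreateIssueStopwatch and CancelStopwatch now report whether anything actually happened through an ok result instead of a dedicated "not exist" error. A caller-side sketch of toggling a stopwatch with the new signatures; the toggle function itself is an assumption, not part of this patch.

// Assumed context: a service-layer file importing "context", "errors",
// issues_model "code.gitea.io/gitea/models/issues" and
// user_model "code.gitea.io/gitea/models/user".

// Sketch: finish the running stopwatch if there is one, otherwise start one.
func toggleStopwatch(ctx context.Context, user *user_model.User, issue *issues_model.Issue) error {
	finished, err := issues_model.FinishIssueStopwatch(ctx, user, issue)
	if err != nil || finished {
		return err // either a real error, or a running stopwatch was stopped
	}
	started, err := issues_model.CreateIssueStopwatch(ctx, user, issue)
	if err != nil {
		return err
	}
	if !started {
		// Create reports ok=false only when this issue already has the user's
		// stopwatch, which Finish just ruled out; surface it anyway.
		return errors.New("stopwatch already exists")
	}
	return nil
}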
diff --git a/models/issues/stopwatch_test.go b/models/issues/stopwatch_test.go
index a1bf9dc931..6333c10234 100644
--- a/models/issues/stopwatch_test.go
+++ b/models/issues/stopwatch_test.go
@@ -10,7 +10,6 @@ import (
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
)
@@ -18,26 +17,22 @@ import (
func TestCancelStopwatch(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- user1, err := user_model.GetUserByID(db.DefaultContext, 1)
- assert.NoError(t, err)
-
- issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
- assert.NoError(t, err)
- issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
- assert.NoError(t, err)
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
- err = issues_model.CancelStopwatch(db.DefaultContext, user1, issue1)
+ ok, err := issues_model.CancelStopwatch(db.DefaultContext, user1, issue1)
assert.NoError(t, err)
+ assert.True(t, ok)
unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user1.ID, IssueID: issue1.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID})
- _ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID})
-
- assert.NoError(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
+ ok, err = issues_model.CancelStopwatch(db.DefaultContext, user1, issue1)
+ assert.NoError(t, err)
+ assert.False(t, ok)
}
func TestStopwatchExists(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
-
assert.True(t, issues_model.StopwatchExists(db.DefaultContext, 1, 1))
assert.False(t, issues_model.StopwatchExists(db.DefaultContext, 1, 2))
}
@@ -58,21 +53,35 @@ func TestHasUserStopwatch(t *testing.T) {
func TestCreateOrStopIssueStopwatch(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- user2, err := user_model.GetUserByID(db.DefaultContext, 2)
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ issue3 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3})
+
+ // create a new stopwatch
+ ok, err := issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue1)
assert.NoError(t, err)
- org3, err := user_model.GetUserByID(db.DefaultContext, 3)
+ assert.True(t, ok)
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue1.ID})
+ // should not create a second stopwatch for the same issue
+ ok, err = issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue1)
assert.NoError(t, err)
-
- issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ assert.False(t, ok)
+ // on a different issue, it will finish the existing stopwatch and create a new one
+ ok, err = issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue3)
assert.NoError(t, err)
- issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ assert.True(t, ok)
+ unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue1.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue3.ID})
+
+ // user2 already has a stopwatch in test fixture
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ ok, err = issues_model.FinishIssueStopwatch(db.DefaultContext, user2, issue2)
assert.NoError(t, err)
-
- assert.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, org3, issue1))
- sw := unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: 3, IssueID: 1})
- assert.LessOrEqual(t, sw.CreatedUnix, timeutil.TimeStampNow())
-
- assert.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, user2, issue2))
- unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: 2, IssueID: 2})
- unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: 2, IssueID: 2})
+ assert.True(t, ok)
+ unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user2.ID, IssueID: issue2.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: user2.ID, IssueID: issue2.ID})
+ ok, err = issues_model.FinishIssueStopwatch(db.DefaultContext, user2, issue2)
+ assert.NoError(t, err)
+ assert.False(t, ok)
}
diff --git a/models/issues/tracked_time.go b/models/issues/tracked_time.go
index ea404d36cd..9c11881e44 100644
--- a/models/issues/tracked_time.go
+++ b/models/issues/tracked_time.go
@@ -168,35 +168,31 @@ func GetTrackedSeconds(ctx context.Context, opts FindTrackedTimesOptions) (track
// AddTime will add the given time (in seconds) to the issue
func AddTime(ctx context.Context, user *user_model.User, issue *Issue, amount int64, created time.Time) (*TrackedTime, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
-
- t, err := addTime(ctx, user, issue, amount, created)
- if err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*TrackedTime, error) {
+ t, err := addTime(ctx, user, issue, amount, created)
+ if err != nil {
+ return nil, err
+ }
- if err := issue.LoadRepo(ctx); err != nil {
- return nil, err
- }
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
- if _, err := CreateComment(ctx, &CreateCommentOptions{
- Issue: issue,
- Repo: issue.Repo,
- Doer: user,
- // Content before v1.21 did store the formatted string instead of seconds,
- // so use "|" as delimiter to mark the new format
- Content: fmt.Sprintf("|%d", amount),
- Type: CommentTypeAddTimeManual,
- TimeID: t.ID,
- }); err != nil {
- return nil, err
- }
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Issue: issue,
+ Repo: issue.Repo,
+ Doer: user,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", amount),
+ Type: CommentTypeAddTimeManual,
+ TimeID: t.ID,
+ }); err != nil {
+ return nil, err
+ }
- return t, committer.Commit()
+ return t, nil
+ })
}
func addTime(ctx context.Context, user *user_model.User, issue *Issue, amount int64, created time.Time) (*TrackedTime, error) {
@@ -241,72 +237,58 @@ func TotalTimesForEachUser(ctx context.Context, options *FindTrackedTimesOptions
// DeleteIssueUserTimes deletes times for issue
func DeleteIssueUserTimes(ctx context.Context, issue *Issue, user *user_model.User) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- opts := FindTrackedTimesOptions{
- IssueID: issue.ID,
- UserID: user.ID,
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ opts := FindTrackedTimesOptions{
+ IssueID: issue.ID,
+ UserID: user.ID,
+ }
- removedTime, err := deleteTimes(ctx, opts)
- if err != nil {
- return err
- }
- if removedTime == 0 {
- return db.ErrNotExist{Resource: "tracked_time"}
- }
+ removedTime, err := deleteTimes(ctx, opts)
+ if err != nil {
+ return err
+ }
+ if removedTime == 0 {
+ return db.ErrNotExist{Resource: "tracked_time"}
+ }
- if err := issue.LoadRepo(ctx); err != nil {
- return err
- }
- if _, err := CreateComment(ctx, &CreateCommentOptions{
- Issue: issue,
- Repo: issue.Repo,
- Doer: user,
- // Content before v1.21 did store the formatted string instead of seconds,
- // so use "|" as delimiter to mark the new format
- Content: fmt.Sprintf("|%d", removedTime),
- Type: CommentTypeDeleteTimeManual,
- }); err != nil {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+ _, err = CreateComment(ctx, &CreateCommentOptions{
+ Issue: issue,
+ Repo: issue.Repo,
+ Doer: user,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", removedTime),
+ Type: CommentTypeDeleteTimeManual,
+ })
return err
- }
-
- return committer.Commit()
+ })
}
// DeleteTime delete a specific Time
func DeleteTime(ctx context.Context, t *TrackedTime) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err := t.LoadAttributes(ctx); err != nil {
- return err
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err := t.LoadAttributes(ctx); err != nil {
+ return err
+ }
- if err := deleteTime(ctx, t); err != nil {
- return err
- }
+ if err := deleteTime(ctx, t); err != nil {
+ return err
+ }
- if _, err := CreateComment(ctx, &CreateCommentOptions{
- Issue: t.Issue,
- Repo: t.Issue.Repo,
- Doer: t.User,
- // Content before v1.21 did store the formatted string instead of seconds,
- // so use "|" as delimiter to mark the new format
- Content: fmt.Sprintf("|%d", t.Time),
- Type: CommentTypeDeleteTimeManual,
- }); err != nil {
+ _, err := CreateComment(ctx, &CreateCommentOptions{
+ Issue: t.Issue,
+ Repo: t.Issue.Repo,
+ Doer: t.User,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", t.Time),
+ Type: CommentTypeDeleteTimeManual,
+ })
return err
- }
-
- return committer.Commit()
+ })
}
func deleteTimes(ctx context.Context, opts FindTrackedTimesOptions) (removedTime int64, err error) {
@@ -350,10 +332,7 @@ func GetIssueTotalTrackedTime(ctx context.Context, opts *IssuesOptions, isClosed
// we get the statistics in smaller chunks and get accumulates
var accum int64
for i := 0; i < len(opts.IssueIDs); {
- chunk := i + MaxQueryParameters
- if chunk > len(opts.IssueIDs) {
- chunk = len(opts.IssueIDs)
- }
+ chunk := min(i+MaxQueryParameters, len(opts.IssueIDs))
time, err := getIssueTotalTrackedTimeChunk(ctx, opts, isClosed, opts.IssueIDs[i:chunk])
if err != nil {
return 0, err
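The tracked_time.go changes follow the same pattern: AddTime now uses db.WithTx2, the value-returning variant of db.WithTx, and the chunking loop in GetIssueTotalTrackedTime uses the min builtin (Go 1.21+) instead of a manual clamp. A small sketch under those assumptions; the record type and build callback are placeholders, not part of the patch:

    package example

    import (
    	"context"

    	"code.gitea.io/gitea/models/db"
    )

    type record struct{ ID int64 } // placeholder payload type

    // create mirrors AddTime: the callback's (value, error) pair is returned
    // directly, and commit/rollback follows the error.
    func create(ctx context.Context, build func(ctx context.Context) (*record, error)) (*record, error) {
    	return db.WithTx2(ctx, func(ctx context.Context) (*record, error) {
    		return build(ctx)
    	})
    }

    // chunk mirrors the min()-based loop above: split ids into slices of at
    // most size elements.
    func chunk(ids []int64, size int) [][]int64 {
    	var out [][]int64
    	for i := 0; i < len(ids); i += size {
    		out = append(out, ids[i:min(i+size, len(ids))])
    	}
    	return out
    }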
diff --git a/models/migrations/base/db.go b/models/migrations/base/db.go
index eb1c44a79e..479a46379c 100644
--- a/models/migrations/base/db.go
+++ b/models/migrations/base/db.go
@@ -52,7 +52,7 @@ func RecreateTable(sess *xorm.Session, bean any) error {
// TODO: This will not work if there are foreign keys
tableName := sess.Engine().TableName(bean)
- tempTableName := fmt.Sprintf("tmp_recreate__%s", tableName)
+ tempTableName := "tmp_recreate__" + tableName
// We need to move the old table away and create a new one with the correct columns
// We will need to do this in stages to prevent data loss
@@ -82,7 +82,7 @@ func RecreateTable(sess *xorm.Session, bean any) error {
}
newTableColumns := table.Columns()
if len(newTableColumns) == 0 {
- return fmt.Errorf("no columns in new table")
+ return errors.New("no columns in new table")
}
hasID := false
for _, column := range newTableColumns {
@@ -518,7 +518,7 @@ func ModifyColumn(x *xorm.Engine, tableName string, col *schemas.Column) error {
func removeAllWithRetry(dir string) error {
var err error
- for i := 0; i < 20; i++ {
+ for range 20 {
err = os.RemoveAll(dir)
if err == nil {
break
@@ -552,11 +552,11 @@ func deleteDB() error {
}
defer db.Close()
- if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil {
+ if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
return err
}
- if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil {
+ if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil {
return err
}
return nil
@@ -568,11 +568,11 @@ func deleteDB() error {
}
defer db.Close()
- if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil {
+ if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
return err
}
- if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE %s", setting.Database.Name)); err != nil {
+ if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil {
return err
}
db.Close()
@@ -594,7 +594,7 @@ func deleteDB() error {
if !schrows.Next() {
// Create and setup a DB schema
- _, err = db.Exec(fmt.Sprintf("CREATE SCHEMA %s", setting.Database.Schema))
+ _, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema)
if err != nil {
return err
}
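The base/db.go hunks are mechanical modernizations: single-operand fmt.Sprintf calls become plain string concatenation, fmt.Errorf with a constant message becomes errors.New, and a counter loop whose index is unused becomes for range n (integer ranges exist since Go 1.22). A compact illustration of all three, with made-up statement text:

    package example

    import (
    	"errors"
    	"fmt"
    )

    func demo(dbName string) error {
    	stmt := "DROP DATABASE IF EXISTS " + dbName // was fmt.Sprintf("DROP DATABASE IF EXISTS %s", dbName)
    	if dbName == "" {
    		return errors.New("empty database name") // was fmt.Errorf with a constant message
    	}
    	for range 3 { // was for i := 0; i < 3; i++ with an unused i
    		fmt.Println(stmt)
    	}
    	return nil
    }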
diff --git a/models/migrations/base/tests.go b/models/migrations/base/tests.go
index fe6de9c517..33fd1df707 100644
--- a/models/migrations/base/tests.go
+++ b/models/migrations/base/tests.go
@@ -1,7 +1,6 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-//nolint:forbidigo
package base
import (
@@ -15,6 +14,7 @@ import (
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/tempdir"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/testlogger"
@@ -105,7 +105,7 @@ func MainTest(m *testing.M) {
giteaConf := os.Getenv("GITEA_CONF")
if giteaConf == "" {
giteaConf = filepath.Join(filepath.Dir(setting.AppPath), "tests/sqlite.ini")
- fmt.Printf("Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf)
+ _, _ = fmt.Fprintf(os.Stderr, "Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf)
}
if !filepath.IsAbs(giteaConf) {
@@ -114,15 +114,16 @@ func MainTest(m *testing.M) {
setting.CustomConf = giteaConf
}
- tmpDataPath, err := os.MkdirTemp("", "data")
+ tmpDataPath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("data")
if err != nil {
testlogger.Fatalf("Unable to create temporary data path %v\n", err)
}
+ defer cleanup()
setting.CustomPath = filepath.Join(setting.AppWorkPath, "custom")
setting.AppDataPath = tmpDataPath
- unittest.InitSettings()
+ unittest.InitSettingsForTesting()
if err = git.InitFull(context.Background()); err != nil {
testlogger.Fatalf("Unable to InitFull: %v\n", err)
}
@@ -132,10 +133,7 @@ func MainTest(m *testing.M) {
exitStatus := m.Run()
if err := removeAllWithRetry(setting.RepoRootPath); err != nil {
- fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
- }
- if err := removeAllWithRetry(tmpDataPath); err != nil {
- fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
+ _, _ = fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
}
os.Exit(exitStatus)
}
diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go
index 5f79a873f1..4f899453b5 100644
--- a/models/migrations/migrations.go
+++ b/models/migrations/migrations.go
@@ -6,6 +6,7 @@ package migrations
import (
"context"
+ "errors"
"fmt"
"code.gitea.io/gitea/models/migrations/v1_10"
@@ -23,6 +24,7 @@ import (
"code.gitea.io/gitea/models/migrations/v1_22"
"code.gitea.io/gitea/models/migrations/v1_23"
"code.gitea.io/gitea/models/migrations/v1_24"
+ "code.gitea.io/gitea/models/migrations/v1_25"
"code.gitea.io/gitea/models/migrations/v1_6"
"code.gitea.io/gitea/models/migrations/v1_7"
"code.gitea.io/gitea/models/migrations/v1_8"
@@ -375,6 +377,15 @@ func prepareMigrationTasks() []*migration {
newMigration(312, "Add DeleteBranchAfterMerge to AutoMerge", v1_24.AddDeleteBranchAfterMergeForAutoMerge),
newMigration(313, "Move PinOrder from issue table to a new table issue_pin", v1_24.MovePinOrderToTableIssuePin),
newMigration(314, "Update OwnerID as zero for repository level action tables", v1_24.UpdateOwnerIDOfRepoLevelActionsTables),
+ newMigration(315, "Add Ephemeral to ActionRunner", v1_24.AddEphemeralToActionRunner),
+ newMigration(316, "Add description for secrets and variables", v1_24.AddDescriptionForSecretsAndVariables),
+ newMigration(317, "Add new index for action for heatmap", v1_24.AddNewIndexForUserDashboard),
+ newMigration(318, "Add anonymous_access_mode for repo_unit", v1_24.AddRepoUnitAnonymousAccessMode),
+ newMigration(319, "Add ExclusiveOrder to Label table", v1_24.AddExclusiveOrderColumnToLabelTable),
+ newMigration(320, "Migrate two_factor_policy to login_source table", v1_24.MigrateSkipTwoFactor),
+
+ // Gitea 1.24.0 ends at database version 321
+ newMigration(321, "Use LONGTEXT for some columns and fix review_state.updated_files column", v1_25.UseLongTextInSomeColumnsAndFixBugs),
}
return preparedMigrations
}
@@ -420,7 +431,7 @@ func EnsureUpToDate(ctx context.Context, x *xorm.Engine) error {
}
if currentDB < 0 {
- return fmt.Errorf("database has not been initialized")
+ return errors.New("database has not been initialized")
}
if minDBVersion > currentDB {
diff --git a/models/migrations/migrations_test.go b/models/migrations/migrations_test.go
index e66b015b3d..8649d116f5 100644
--- a/models/migrations/migrations_test.go
+++ b/models/migrations/migrations_test.go
@@ -22,7 +22,7 @@ func TestMigrations(t *testing.T) {
assert.EqualValues(t, 71, migrationIDNumberToDBVersion(70))
- assert.EqualValues(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations))
- assert.EqualValues(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations))
- assert.EqualValues(t, []*migration{}, getPendingMigrations(72, preparedMigrations))
+ assert.Equal(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations))
+ assert.Equal(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations))
+ assert.Equal(t, []*migration{}, getPendingMigrations(72, preparedMigrations))
}
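The assertion change above (assert.EqualValues to assert.Equal) also appears in later test hunks such as v181_test.go: testify's Equal requires both operands to have the same type, while EqualValues additionally tries a type conversion, so Equal catches type mismatches that EqualValues would hide. A tiny demonstration:

    package example

    import (
    	"testing"

    	"github.com/stretchr/testify/assert"
    )

    func TestEqualVsEqualValues(t *testing.T) {
    	var a int32 = 7
    	var b int64 = 7
    	assert.EqualValues(t, a, b) // passes: values match after conversion
    	assert.NotEqual(t, a, b)    // assert.Equal(t, a, b) would fail: different types
    }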
diff --git a/models/migrations/v1_10/v100.go b/models/migrations/v1_10/v100.go
index 5d2fd8e244..1742bea296 100644
--- a/models/migrations/v1_10/v100.go
+++ b/models/migrations/v1_10/v100.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"net/url"
diff --git a/models/migrations/v1_10/v101.go b/models/migrations/v1_10/v101.go
index f023a2a0e7..6c8dfe2486 100644
--- a/models/migrations/v1_10/v101.go
+++ b/models/migrations/v1_10/v101.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_10/v88.go b/models/migrations/v1_10/v88.go
index 7e86ac364f..eb8e81c19e 100644
--- a/models/migrations/v1_10/v88.go
+++ b/models/migrations/v1_10/v88.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"crypto/sha1"
diff --git a/models/migrations/v1_10/v89.go b/models/migrations/v1_10/v89.go
index d5f27ffdc6..0df2a6e17b 100644
--- a/models/migrations/v1_10/v89.go
+++ b/models/migrations/v1_10/v89.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v90.go b/models/migrations/v1_10/v90.go
index 295d4b1c1b..5521a97e32 100644
--- a/models/migrations/v1_10/v90.go
+++ b/models/migrations/v1_10/v90.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v91.go b/models/migrations/v1_10/v91.go
index 48cac2de70..08db6c2742 100644
--- a/models/migrations/v1_10/v91.go
+++ b/models/migrations/v1_10/v91.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v92.go b/models/migrations/v1_10/v92.go
index 9080108594..b6c04a9234 100644
--- a/models/migrations/v1_10/v92.go
+++ b/models/migrations/v1_10/v92.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"xorm.io/builder"
diff --git a/models/migrations/v1_10/v93.go b/models/migrations/v1_10/v93.go
index ee59a8db39..c131be9a8d 100644
--- a/models/migrations/v1_10/v93.go
+++ b/models/migrations/v1_10/v93.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v94.go b/models/migrations/v1_10/v94.go
index c131af162b..13b7d7b303 100644
--- a/models/migrations/v1_10/v94.go
+++ b/models/migrations/v1_10/v94.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v95.go b/models/migrations/v1_10/v95.go
index 3b1f67fd9c..86b52026bf 100644
--- a/models/migrations/v1_10/v95.go
+++ b/models/migrations/v1_10/v95.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v96.go b/models/migrations/v1_10/v96.go
index 34c8240031..ca35a169c4 100644
--- a/models/migrations/v1_10/v96.go
+++ b/models/migrations/v1_10/v96.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"path/filepath"
diff --git a/models/migrations/v1_10/v97.go b/models/migrations/v1_10/v97.go
index dee45b32e3..5872bb63e5 100644
--- a/models/migrations/v1_10/v97.go
+++ b/models/migrations/v1_10/v97.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v98.go b/models/migrations/v1_10/v98.go
index bdd9aed089..d21c326459 100644
--- a/models/migrations/v1_10/v98.go
+++ b/models/migrations/v1_10/v98.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import "xorm.io/xorm"
diff --git a/models/migrations/v1_10/v99.go b/models/migrations/v1_10/v99.go
index ebe6597f7c..223c188057 100644
--- a/models/migrations/v1_10/v99.go
+++ b/models/migrations/v1_10/v99.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_10 //nolint
+package v1_10
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_11/v102.go b/models/migrations/v1_11/v102.go
index 9358e4cef3..e52290afb0 100644
--- a/models/migrations/v1_11/v102.go
+++ b/models/migrations/v1_11/v102.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_11/v103.go b/models/migrations/v1_11/v103.go
index 53527dac58..a515710160 100644
--- a/models/migrations/v1_11/v103.go
+++ b/models/migrations/v1_11/v103.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v104.go b/models/migrations/v1_11/v104.go
index 3e8ee64bc1..3b0d3c64b2 100644
--- a/models/migrations/v1_11/v104.go
+++ b/models/migrations/v1_11/v104.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_11/v105.go b/models/migrations/v1_11/v105.go
index b91340c30a..d86973a0f6 100644
--- a/models/migrations/v1_11/v105.go
+++ b/models/migrations/v1_11/v105.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v106.go b/models/migrations/v1_11/v106.go
index ecb11cdd1e..edffe18683 100644
--- a/models/migrations/v1_11/v106.go
+++ b/models/migrations/v1_11/v106.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v107.go b/models/migrations/v1_11/v107.go
index f0bfe5862c..a158e3bb50 100644
--- a/models/migrations/v1_11/v107.go
+++ b/models/migrations/v1_11/v107.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v108.go b/models/migrations/v1_11/v108.go
index a85096234d..8f14504ceb 100644
--- a/models/migrations/v1_11/v108.go
+++ b/models/migrations/v1_11/v108.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v109.go b/models/migrations/v1_11/v109.go
index ea565ccda3..f7616aec7b 100644
--- a/models/migrations/v1_11/v109.go
+++ b/models/migrations/v1_11/v109.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v110.go b/models/migrations/v1_11/v110.go
index 81afa1331d..512f728c03 100644
--- a/models/migrations/v1_11/v110.go
+++ b/models/migrations/v1_11/v110.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_11/v111.go b/models/migrations/v1_11/v111.go
index ff108479a9..c27465f051 100644
--- a/models/migrations/v1_11/v111.go
+++ b/models/migrations/v1_11/v111.go
@@ -1,10 +1,11 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"fmt"
+ "slices"
"xorm.io/xorm"
)
@@ -344,10 +345,8 @@ func AddBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
}
return AccessModeWrite <= perm.UnitsMode[UnitTypeCode], nil
}
- for _, id := range protectedBranch.ApprovalsWhitelistUserIDs {
- if id == reviewer.ID {
- return true, nil
- }
+ if slices.Contains(protectedBranch.ApprovalsWhitelistUserIDs, reviewer.ID) {
+ return true, nil
}
// isUserInTeams
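The v111.go hunk collapses a hand-written membership loop into slices.Contains, available in the standard library since Go 1.21; the surrounding whitelist logic is unchanged. The equivalent check in isolation:

    package example

    import "slices"

    // isWhitelisted returns true when id appears in whitelist, matching the
    // behaviour of the loop the hunk removes.
    func isWhitelisted(whitelist []int64, id int64) bool {
    	return slices.Contains(whitelist, id)
    }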
diff --git a/models/migrations/v1_11/v112.go b/models/migrations/v1_11/v112.go
index 0857663119..fe45cf9222 100644
--- a/models/migrations/v1_11/v112.go
+++ b/models/migrations/v1_11/v112.go
@@ -1,12 +1,12 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
- "fmt"
"path/filepath"
+ "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -31,7 +31,7 @@ func RemoveAttachmentMissedRepo(x *xorm.Engine) error {
for i := 0; i < len(attachments); i++ {
uuid := attachments[i].UUID
if err = util.RemoveAll(filepath.Join(setting.Attachment.Storage.Path, uuid[0:1], uuid[1:2], uuid)); err != nil {
- fmt.Printf("Error: %v", err) //nolint:forbidigo
+ log.Warn("Unable to remove attachment file by UUID %s: %v", uuid, err)
}
}
diff --git a/models/migrations/v1_11/v113.go b/models/migrations/v1_11/v113.go
index dea344a44f..a4d54f66fb 100644
--- a/models/migrations/v1_11/v113.go
+++ b/models/migrations/v1_11/v113.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"fmt"
diff --git a/models/migrations/v1_11/v114.go b/models/migrations/v1_11/v114.go
index 95adcee989..9467a8a90c 100644
--- a/models/migrations/v1_11/v114.go
+++ b/models/migrations/v1_11/v114.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"net/url"
diff --git a/models/migrations/v1_11/v115.go b/models/migrations/v1_11/v115.go
index 8c631cfd0b..5933c0520f 100644
--- a/models/migrations/v1_11/v115.go
+++ b/models/migrations/v1_11/v115.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"crypto/md5"
@@ -146,7 +146,7 @@ func copyOldAvatarToNewLocation(userID int64, oldAvatar string) (string, error)
return "", fmt.Errorf("io.ReadAll: %w", err)
}
- newAvatar := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", userID, md5.Sum(data)))))
+ newAvatar := fmt.Sprintf("%x", md5.Sum(fmt.Appendf(nil, "%d-%x", userID, md5.Sum(data))))
if newAvatar == oldAvatar {
return newAvatar, nil
}
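The v115.go hunk swaps []byte(fmt.Sprintf(...)) for fmt.Appendf(nil, ...) (Go 1.19+), which writes the formatted output straight into a byte slice before hashing; the resulting avatar name is unchanged. A stand-alone version of the new expression:

    package example

    import (
    	"crypto/md5"
    	"fmt"
    )

    // avatarName reproduces the updated expression: hash "<userID>-<md5(data)>"
    // and render the digest as hex.
    func avatarName(userID int64, data []byte) string {
    	return fmt.Sprintf("%x", md5.Sum(fmt.Appendf(nil, "%d-%x", userID, md5.Sum(data))))
    }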
diff --git a/models/migrations/v1_11/v116.go b/models/migrations/v1_11/v116.go
index 85aa76c1e0..729fbad18b 100644
--- a/models/migrations/v1_11/v116.go
+++ b/models/migrations/v1_11/v116.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_11 //nolint
+package v1_11
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v117.go b/models/migrations/v1_12/v117.go
index 8eadcdef2b..73b58ca34b 100644
--- a/models/migrations/v1_12/v117.go
+++ b/models/migrations/v1_12/v117.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v118.go b/models/migrations/v1_12/v118.go
index eb022dc5e4..e8b4249743 100644
--- a/models/migrations/v1_12/v118.go
+++ b/models/migrations/v1_12/v118.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v119.go b/models/migrations/v1_12/v119.go
index 60bfe6a57d..b4bf29a935 100644
--- a/models/migrations/v1_12/v119.go
+++ b/models/migrations/v1_12/v119.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v120.go b/models/migrations/v1_12/v120.go
index 3f7ed8d373..14d515f5a7 100644
--- a/models/migrations/v1_12/v120.go
+++ b/models/migrations/v1_12/v120.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v121.go b/models/migrations/v1_12/v121.go
index 175ec9164d..a28ae4e1c9 100644
--- a/models/migrations/v1_12/v121.go
+++ b/models/migrations/v1_12/v121.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import "xorm.io/xorm"
diff --git a/models/migrations/v1_12/v122.go b/models/migrations/v1_12/v122.go
index 6e31d863a1..bc1b175f6a 100644
--- a/models/migrations/v1_12/v122.go
+++ b/models/migrations/v1_12/v122.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v123.go b/models/migrations/v1_12/v123.go
index b0c3af07a3..52b10bb850 100644
--- a/models/migrations/v1_12/v123.go
+++ b/models/migrations/v1_12/v123.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v124.go b/models/migrations/v1_12/v124.go
index d2ba03ffe0..9a93f436d4 100644
--- a/models/migrations/v1_12/v124.go
+++ b/models/migrations/v1_12/v124.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v125.go b/models/migrations/v1_12/v125.go
index ec4ffaab25..7f582ecff5 100644
--- a/models/migrations/v1_12/v125.go
+++ b/models/migrations/v1_12/v125.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v126.go b/models/migrations/v1_12/v126.go
index ca9ec3aa3f..64fd7f7478 100644
--- a/models/migrations/v1_12/v126.go
+++ b/models/migrations/v1_12/v126.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/builder"
diff --git a/models/migrations/v1_12/v127.go b/models/migrations/v1_12/v127.go
index 00e391dc87..9bd78db95e 100644
--- a/models/migrations/v1_12/v127.go
+++ b/models/migrations/v1_12/v127.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v128.go b/models/migrations/v1_12/v128.go
index cba64711d0..e7dbff3766 100644
--- a/models/migrations/v1_12/v128.go
+++ b/models/migrations/v1_12/v128.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v129.go b/models/migrations/v1_12/v129.go
index cf228242b9..3e4d3aca68 100644
--- a/models/migrations/v1_12/v129.go
+++ b/models/migrations/v1_12/v129.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v130.go b/models/migrations/v1_12/v130.go
index 391810c7ca..107bb756fd 100644
--- a/models/migrations/v1_12/v130.go
+++ b/models/migrations/v1_12/v130.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"code.gitea.io/gitea/modules/json"
diff --git a/models/migrations/v1_12/v131.go b/models/migrations/v1_12/v131.go
index 5184bc3590..1266c2f185 100644
--- a/models/migrations/v1_12/v131.go
+++ b/models/migrations/v1_12/v131.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v132.go b/models/migrations/v1_12/v132.go
index 3b2b28f7ab..8b1ae6db93 100644
--- a/models/migrations/v1_12/v132.go
+++ b/models/migrations/v1_12/v132.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v133.go b/models/migrations/v1_12/v133.go
index c9087fc8c1..69e20597d8 100644
--- a/models/migrations/v1_12/v133.go
+++ b/models/migrations/v1_12/v133.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import "xorm.io/xorm"
diff --git a/models/migrations/v1_12/v134.go b/models/migrations/v1_12/v134.go
index a918d38757..09d743964d 100644
--- a/models/migrations/v1_12/v134.go
+++ b/models/migrations/v1_12/v134.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v135.go b/models/migrations/v1_12/v135.go
index 8898011df5..5df0ad7fc4 100644
--- a/models/migrations/v1_12/v135.go
+++ b/models/migrations/v1_12/v135.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v136.go b/models/migrations/v1_12/v136.go
index d91ff92feb..0f53278b46 100644
--- a/models/migrations/v1_12/v136.go
+++ b/models/migrations/v1_12/v136.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v137.go b/models/migrations/v1_12/v137.go
index 0d86b72010..9d38483488 100644
--- a/models/migrations/v1_12/v137.go
+++ b/models/migrations/v1_12/v137.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_12/v138.go b/models/migrations/v1_12/v138.go
index 8c8d353f40..4485adeb2d 100644
--- a/models/migrations/v1_12/v138.go
+++ b/models/migrations/v1_12/v138.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"fmt"
diff --git a/models/migrations/v1_12/v139.go b/models/migrations/v1_12/v139.go
index 279aa7df87..a3799841ac 100644
--- a/models/migrations/v1_12/v139.go
+++ b/models/migrations/v1_12/v139.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_12 //nolint
+package v1_12
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_13/v140.go b/models/migrations/v1_13/v140.go
index 2d3337012d..a9a047bca9 100644
--- a/models/migrations/v1_13/v140.go
+++ b/models/migrations/v1_13/v140.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"fmt"
@@ -21,12 +21,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
// RepoIndexerType specifies the repository indexer type
type RepoIndexerType int
- const (
- // RepoIndexerTypeCode code indexer - 0
- RepoIndexerTypeCode RepoIndexerType = iota //nolint:unused
- // RepoIndexerTypeStats repository stats indexer - 1
- RepoIndexerTypeStats
- )
+ const RepoIndexerTypeStats RepoIndexerType = 1
// RepoIndexerStatus see models/repo_indexer.go
type RepoIndexerStatus struct {
@@ -46,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
}
// Delete language stats
- if _, err := x.Exec(fmt.Sprintf("%s language_stat", truncExpr)); err != nil {
+ if _, err := x.Exec(truncExpr + " language_stat"); err != nil {
return err
}
diff --git a/models/migrations/v1_13/v141.go b/models/migrations/v1_13/v141.go
index ae211e0e44..b54bc1727c 100644
--- a/models/migrations/v1_13/v141.go
+++ b/models/migrations/v1_13/v141.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"fmt"
diff --git a/models/migrations/v1_13/v142.go b/models/migrations/v1_13/v142.go
index 7c7c01ad47..d08a0ae0bf 100644
--- a/models/migrations/v1_13/v142.go
+++ b/models/migrations/v1_13/v142.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_13/v143.go b/models/migrations/v1_13/v143.go
index 885768dff3..b9a856ed0f 100644
--- a/models/migrations/v1_13/v143.go
+++ b/models/migrations/v1_13/v143.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_13/v144.go b/models/migrations/v1_13/v144.go
index f5a0bc5751..9352d78bc8 100644
--- a/models/migrations/v1_13/v144.go
+++ b/models/migrations/v1_13/v144.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_13/v145.go b/models/migrations/v1_13/v145.go
index 8acb29bf33..86ebb4f9d9 100644
--- a/models/migrations/v1_13/v145.go
+++ b/models/migrations/v1_13/v145.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"fmt"
@@ -42,7 +42,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
switch {
case setting.Database.Type.IsMySQL():
- if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil {
+ if _, err := sess.Exec("ALTER TABLE language_stat MODIFY COLUMN language " + sqlType); err != nil {
return err
}
case setting.Database.Type.IsMSSQL():
@@ -64,7 +64,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
return fmt.Errorf("Drop table `language_stat` constraint `%s`: %w", constraint, err)
}
}
- if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language %s", sqlType)); err != nil {
+ if _, err := sess.Exec("ALTER TABLE language_stat ALTER COLUMN language " + sqlType); err != nil {
return err
}
// Finally restore the constraint
@@ -72,7 +72,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
return err
}
case setting.Database.Type.IsPostgreSQL():
- if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil {
+ if _, err := sess.Exec("ALTER TABLE language_stat ALTER COLUMN language TYPE " + sqlType); err != nil {
return err
}
}
diff --git a/models/migrations/v1_13/v146.go b/models/migrations/v1_13/v146.go
index 7d9a878704..355c772c26 100644
--- a/models/migrations/v1_13/v146.go
+++ b/models/migrations/v1_13/v146.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_13/v147.go b/models/migrations/v1_13/v147.go
index 510ef39b28..0059c06220 100644
--- a/models/migrations/v1_13/v147.go
+++ b/models/migrations/v1_13/v147.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_13/v148.go b/models/migrations/v1_13/v148.go
index 7bb8ab700b..d276db3d61 100644
--- a/models/migrations/v1_13/v148.go
+++ b/models/migrations/v1_13/v148.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_13/v149.go b/models/migrations/v1_13/v149.go
index 2a1db04cbb..a96b8e5ac7 100644
--- a/models/migrations/v1_13/v149.go
+++ b/models/migrations/v1_13/v149.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"fmt"
diff --git a/models/migrations/v1_13/v150.go b/models/migrations/v1_13/v150.go
index d5ba489566..590ea72903 100644
--- a/models/migrations/v1_13/v150.go
+++ b/models/migrations/v1_13/v150.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_13/v151.go b/models/migrations/v1_13/v151.go
index 25af1d03ec..454929534f 100644
--- a/models/migrations/v1_13/v151.go
+++ b/models/migrations/v1_13/v151.go
@@ -1,10 +1,11 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"context"
+ "errors"
"fmt"
"strings"
@@ -113,7 +114,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
newTableColumns := table.Columns()
if len(newTableColumns) == 0 {
- return fmt.Errorf("no columns in new table")
+ return errors.New("no columns in new table")
}
hasID := false
for _, column := range newTableColumns {
diff --git a/models/migrations/v1_13/v152.go b/models/migrations/v1_13/v152.go
index 502c82a40d..648e26446f 100644
--- a/models/migrations/v1_13/v152.go
+++ b/models/migrations/v1_13/v152.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import "xorm.io/xorm"
diff --git a/models/migrations/v1_13/v153.go b/models/migrations/v1_13/v153.go
index 0b2dd3eb62..e5462fc162 100644
--- a/models/migrations/v1_13/v153.go
+++ b/models/migrations/v1_13/v153.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_13/v154.go b/models/migrations/v1_13/v154.go
index 60cc56713e..5477d1b889 100644
--- a/models/migrations/v1_13/v154.go
+++ b/models/migrations/v1_13/v154.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_13 //nolint
+package v1_13
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_14/main_test.go b/models/migrations/v1_14/main_test.go
index 7a091b9b9a..978f88577c 100644
--- a/models/migrations/v1_14/main_test.go
+++ b/models/migrations/v1_14/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"testing"
diff --git a/models/migrations/v1_14/v155.go b/models/migrations/v1_14/v155.go
index e814f59938..505a9ae033 100644
--- a/models/migrations/v1_14/v155.go
+++ b/models/migrations/v1_14/v155.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v156.go b/models/migrations/v1_14/v156.go
index 2cf4954a15..2fa5819610 100644
--- a/models/migrations/v1_14/v156.go
+++ b/models/migrations/v1_14/v156.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v157.go b/models/migrations/v1_14/v157.go
index 7187278d29..2c5625ebbd 100644
--- a/models/migrations/v1_14/v157.go
+++ b/models/migrations/v1_14/v157.go
@@ -1,24 +1,13 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"xorm.io/xorm"
)
func FixRepoTopics(x *xorm.Engine) error {
- type Topic struct { //nolint:unused
- ID int64 `xorm:"pk autoincr"`
- Name string `xorm:"UNIQUE VARCHAR(25)"`
- RepoCount int
- }
-
- type RepoTopic struct { //nolint:unused
- RepoID int64 `xorm:"pk"`
- TopicID int64 `xorm:"pk"`
- }
-
type Repository struct {
ID int64 `xorm:"pk autoincr"`
Topics []string `xorm:"TEXT JSON"`
diff --git a/models/migrations/v1_14/v158.go b/models/migrations/v1_14/v158.go
index 1094d8abf7..3c57e8e3da 100644
--- a/models/migrations/v1_14/v158.go
+++ b/models/migrations/v1_14/v158.go
@@ -1,10 +1,10 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
- "fmt"
+ "errors"
"strconv"
"code.gitea.io/gitea/modules/log"
@@ -82,7 +82,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " +
"(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id"
default:
- return fmt.Errorf("Unsupported database type")
+ return errors.New("Unsupported database type")
}
if err := sess.SQL(sqlCmd).Find(&comments); err != nil {
diff --git a/models/migrations/v1_14/v159.go b/models/migrations/v1_14/v159.go
index 149ae0f6a8..e6f6f0f061 100644
--- a/models/migrations/v1_14/v159.go
+++ b/models/migrations/v1_14/v159.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_14/v160.go b/models/migrations/v1_14/v160.go
index 4dea91b514..73f3798954 100644
--- a/models/migrations/v1_14/v160.go
+++ b/models/migrations/v1_14/v160.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_14/v161.go b/models/migrations/v1_14/v161.go
index ac7e821a80..eb92dee77c 100644
--- a/models/migrations/v1_14/v161.go
+++ b/models/migrations/v1_14/v161.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"context"
diff --git a/models/migrations/v1_14/v162.go b/models/migrations/v1_14/v162.go
index 2e4e0b8eb0..a0ddd36d55 100644
--- a/models/migrations/v1_14/v162.go
+++ b/models/migrations/v1_14/v162.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_14/v163.go b/models/migrations/v1_14/v163.go
index 0cd8ba68c8..84c35190b7 100644
--- a/models/migrations/v1_14/v163.go
+++ b/models/migrations/v1_14/v163.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_14/v164.go b/models/migrations/v1_14/v164.go
index 54f6951427..d2fd9b8464 100644
--- a/models/migrations/v1_14/v164.go
+++ b/models/migrations/v1_14/v164.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v165.go b/models/migrations/v1_14/v165.go
index 926350cdf7..6e1b34156b 100644
--- a/models/migrations/v1_14/v165.go
+++ b/models/migrations/v1_14/v165.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"code.gitea.io/gitea/models/migrations/base"
@@ -16,10 +16,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error {
return nil
}
- type HookTask struct { //nolint:unused
- Typ string `xorm:"VARCHAR(16) index"`
- }
-
+ // HookTask: Typ string `xorm:"VARCHAR(16) index"`
if err := base.ModifyColumn(x, "hook_task", &schemas.Column{
Name: "typ",
SQLType: schemas.SQLType{
@@ -42,10 +39,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error {
return err
}
- type Webhook struct { //nolint:unused
- Type string `xorm:"VARCHAR(16) index"`
- }
-
+ // Webhook: Type string `xorm:"VARCHAR(16) index"`
if err := base.ModifyColumn(x, "webhook", &schemas.Column{
Name: "type",
SQLType: schemas.SQLType{
diff --git a/models/migrations/v1_14/v166.go b/models/migrations/v1_14/v166.go
index e5731582fd..4c106bd7da 100644
--- a/models/migrations/v1_14/v166.go
+++ b/models/migrations/v1_14/v166.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"crypto/sha256"
diff --git a/models/migrations/v1_14/v167.go b/models/migrations/v1_14/v167.go
index 9d416f6a32..d77bbc401e 100644
--- a/models/migrations/v1_14/v167.go
+++ b/models/migrations/v1_14/v167.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v168.go b/models/migrations/v1_14/v168.go
index a30a8859f7..aa93eec19b 100644
--- a/models/migrations/v1_14/v168.go
+++ b/models/migrations/v1_14/v168.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import "xorm.io/xorm"
diff --git a/models/migrations/v1_14/v169.go b/models/migrations/v1_14/v169.go
index 5b81bb58b1..4f9df0d96f 100644
--- a/models/migrations/v1_14/v169.go
+++ b/models/migrations/v1_14/v169.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_14/v170.go b/models/migrations/v1_14/v170.go
index 7b6498a3e9..a2ff4623e1 100644
--- a/models/migrations/v1_14/v170.go
+++ b/models/migrations/v1_14/v170.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v171.go b/models/migrations/v1_14/v171.go
index 51a35a02ad..7b200e960a 100644
--- a/models/migrations/v1_14/v171.go
+++ b/models/migrations/v1_14/v171.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v172.go b/models/migrations/v1_14/v172.go
index 0f9bef902a..bbd61d87b2 100644
--- a/models/migrations/v1_14/v172.go
+++ b/models/migrations/v1_14/v172.go
@@ -1,7 +1,7 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_14/v173.go b/models/migrations/v1_14/v173.go
index 2d9eee9197..7752fbe966 100644
--- a/models/migrations/v1_14/v173.go
+++ b/models/migrations/v1_14/v173.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v174.go b/models/migrations/v1_14/v174.go
index c839e15db8..4049e43070 100644
--- a/models/migrations/v1_14/v174.go
+++ b/models/migrations/v1_14/v174.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v175.go b/models/migrations/v1_14/v175.go
index 70d72b2600..92ed130473 100644
--- a/models/migrations/v1_14/v175.go
+++ b/models/migrations/v1_14/v175.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v176.go b/models/migrations/v1_14/v176.go
index 1ed49f75fa..ef5dce9a02 100644
--- a/models/migrations/v1_14/v176.go
+++ b/models/migrations/v1_14/v176.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_14/v176_test.go b/models/migrations/v1_14/v176_test.go
index ea3e750d7f..5c1db4db71 100644
--- a/models/migrations/v1_14/v176_test.go
+++ b/models/migrations/v1_14/v176_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"testing"
diff --git a/models/migrations/v1_14/v177.go b/models/migrations/v1_14/v177.go
index 6e1838f369..96676bf8d9 100644
--- a/models/migrations/v1_14/v177.go
+++ b/models/migrations/v1_14/v177.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"fmt"
diff --git a/models/migrations/v1_14/v177_test.go b/models/migrations/v1_14/v177_test.go
index 5568a18fec..263f69f338 100644
--- a/models/migrations/v1_14/v177_test.go
+++ b/models/migrations/v1_14/v177_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_14 //nolint
+package v1_14
import (
"testing"
diff --git a/models/migrations/v1_15/main_test.go b/models/migrations/v1_15/main_test.go
index 366f19788e..d01585e997 100644
--- a/models/migrations/v1_15/main_test.go
+++ b/models/migrations/v1_15/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"testing"
diff --git a/models/migrations/v1_15/v178.go b/models/migrations/v1_15/v178.go
index 6d236eb049..ca3a5c262e 100644
--- a/models/migrations/v1_15/v178.go
+++ b/models/migrations/v1_15/v178.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_15/v179.go b/models/migrations/v1_15/v179.go
index f6b142eb42..d6fb86ffec 100644
--- a/models/migrations/v1_15/v179.go
+++ b/models/migrations/v1_15/v179.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_15/v180.go b/models/migrations/v1_15/v180.go
index c71e771861..dd132f8330 100644
--- a/models/migrations/v1_15/v180.go
+++ b/models/migrations/v1_15/v180.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"code.gitea.io/gitea/modules/json"
diff --git a/models/migrations/v1_15/v181.go b/models/migrations/v1_15/v181.go
index 2185ed0213..fb1d3d7a75 100644
--- a/models/migrations/v1_15/v181.go
+++ b/models/migrations/v1_15/v181.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"strings"
diff --git a/models/migrations/v1_15/v181_test.go b/models/migrations/v1_15/v181_test.go
index 1b075be7a0..73b5c1f3d6 100644
--- a/models/migrations/v1_15/v181_test.go
+++ b/models/migrations/v1_15/v181_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"strings"
@@ -49,7 +49,7 @@ func Test_AddPrimaryEmail2EmailAddress(t *testing.T) {
assert.NoError(t, err)
assert.True(t, has)
assert.True(t, emailAddress.IsPrimary)
- assert.EqualValues(t, user.IsActive, emailAddress.IsActivated)
- assert.EqualValues(t, user.ID, emailAddress.UID)
+ assert.Equal(t, user.IsActive, emailAddress.IsActivated)
+ assert.Equal(t, user.ID, emailAddress.UID)
}
}
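Aside, not part of the patch: these hunks swap assert.EqualValues for assert.Equal because EqualValues also passes when the two arguments are merely convertible to the same value, while Equal requires the types to match and therefore catches unintended type mismatches. A minimal illustrative test, assuming only github.com/stretchr/testify/assert:

package v1_15

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestEqualVsEqualValues(t *testing.T) {
	var uid int64 = 2

	// EqualValues converts before comparing, so the int literal 2 passes against an int64.
	assert.EqualValues(t, 2, uid)

	// Equal compares type and value, so the expected value must already be an int64.
	assert.Equal(t, int64(2), uid)
}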
diff --git a/models/migrations/v1_15/v182.go b/models/migrations/v1_15/v182.go
index 9ca500c0f9..f53ff11df9 100644
--- a/models/migrations/v1_15/v182.go
+++ b/models/migrations/v1_15/v182.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_15/v182_test.go b/models/migrations/v1_15/v182_test.go
index 75ef8e1cd8..5fc6a0c467 100644
--- a/models/migrations/v1_15/v182_test.go
+++ b/models/migrations/v1_15/v182_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"testing"
diff --git a/models/migrations/v1_15/v183.go b/models/migrations/v1_15/v183.go
index effad1b467..5d0582f03d 100644
--- a/models/migrations/v1_15/v183.go
+++ b/models/migrations/v1_15/v183.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"fmt"
diff --git a/models/migrations/v1_15/v184.go b/models/migrations/v1_15/v184.go
index 4b3dd1467a..2823bc1f7a 100644
--- a/models/migrations/v1_15/v184.go
+++ b/models/migrations/v1_15/v184.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"context"
diff --git a/models/migrations/v1_15/v185.go b/models/migrations/v1_15/v185.go
index e5878ec193..60af59edca 100644
--- a/models/migrations/v1_15/v185.go
+++ b/models/migrations/v1_15/v185.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_15/v186.go b/models/migrations/v1_15/v186.go
index 01aab3add5..67dc97d13d 100644
--- a/models/migrations/v1_15/v186.go
+++ b/models/migrations/v1_15/v186.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_15/v187.go b/models/migrations/v1_15/v187.go
index 21cd6772b7..5fd90c65fb 100644
--- a/models/migrations/v1_15/v187.go
+++ b/models/migrations/v1_15/v187.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_15/v188.go b/models/migrations/v1_15/v188.go
index 71e45cab0e..4494e6ff05 100644
--- a/models/migrations/v1_15/v188.go
+++ b/models/migrations/v1_15/v188.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_15 //nolint
+package v1_15
import "xorm.io/xorm"
diff --git a/models/migrations/v1_16/main_test.go b/models/migrations/v1_16/main_test.go
index 817a0c13a4..7f93d6e9e5 100644
--- a/models/migrations/v1_16/main_test.go
+++ b/models/migrations/v1_16/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"testing"
diff --git a/models/migrations/v1_16/v189.go b/models/migrations/v1_16/v189.go
index 5649645051..6bc99e58ab 100644
--- a/models/migrations/v1_16/v189.go
+++ b/models/migrations/v1_16/v189.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"encoding/binary"
diff --git a/models/migrations/v1_16/v189_test.go b/models/migrations/v1_16/v189_test.go
index 32ef821d27..fb56ac8e11 100644
--- a/models/migrations/v1_16/v189_test.go
+++ b/models/migrations/v1_16/v189_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"testing"
@@ -75,8 +75,8 @@ func Test_UnwrapLDAPSourceCfg(t *testing.T) {
return
}
- assert.EqualValues(t, expected, converted, "UnwrapLDAPSourceCfg failed for %d", source.ID)
- assert.EqualValues(t, source.ID%2 == 0, source.IsActive, "UnwrapLDAPSourceCfg failed for %d", source.ID)
+ assert.Equal(t, expected, converted, "UnwrapLDAPSourceCfg failed for %d", source.ID)
+ assert.Equal(t, source.ID%2 == 0, source.IsActive, "UnwrapLDAPSourceCfg failed for %d", source.ID)
}
}
}
diff --git a/models/migrations/v1_16/v190.go b/models/migrations/v1_16/v190.go
index 5953802849..1eb6b6ddb4 100644
--- a/models/migrations/v1_16/v190.go
+++ b/models/migrations/v1_16/v190.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v191.go b/models/migrations/v1_16/v191.go
index c618783c08..957c82e484 100644
--- a/models/migrations/v1_16/v191.go
+++ b/models/migrations/v1_16/v191.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_16/v192.go b/models/migrations/v1_16/v192.go
index 2d5d158a09..9d03fbe3c8 100644
--- a/models/migrations/v1_16/v192.go
+++ b/models/migrations/v1_16/v192.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_16/v193.go b/models/migrations/v1_16/v193.go
index 8d3ce7a558..a5af2de380 100644
--- a/models/migrations/v1_16/v193.go
+++ b/models/migrations/v1_16/v193.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v193_test.go b/models/migrations/v1_16/v193_test.go
index b279967a2c..2e827f0550 100644
--- a/models/migrations/v1_16/v193_test.go
+++ b/models/migrations/v1_16/v193_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"testing"
@@ -62,7 +62,7 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
has, err := x.ID(attach.IssueID).Get(&issue)
assert.NoError(t, err)
assert.True(t, has)
- assert.EqualValues(t, attach.RepoID, issue.RepoID)
+ assert.Equal(t, attach.RepoID, issue.RepoID)
}
var releaseAttachments []*NewAttachment
@@ -75,6 +75,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
has, err := x.ID(attach.ReleaseID).Get(&release)
assert.NoError(t, err)
assert.True(t, has)
- assert.EqualValues(t, attach.RepoID, release.RepoID)
+ assert.Equal(t, attach.RepoID, release.RepoID)
}
}
diff --git a/models/migrations/v1_16/v194.go b/models/migrations/v1_16/v194.go
index 6aa13c50cf..2e4ed8340e 100644
--- a/models/migrations/v1_16/v194.go
+++ b/models/migrations/v1_16/v194.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v195.go b/models/migrations/v1_16/v195.go
index 6d7e94141e..4fd42b7bd2 100644
--- a/models/migrations/v1_16/v195.go
+++ b/models/migrations/v1_16/v195.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v195_test.go b/models/migrations/v1_16/v195_test.go
index 742397bf32..946e06e399 100644
--- a/models/migrations/v1_16/v195_test.go
+++ b/models/migrations/v1_16/v195_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"testing"
diff --git a/models/migrations/v1_16/v196.go b/models/migrations/v1_16/v196.go
index 7cbafc61e5..6c9caa100f 100644
--- a/models/migrations/v1_16/v196.go
+++ b/models/migrations/v1_16/v196.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v197.go b/models/migrations/v1_16/v197.go
index 97888b2847..862bdfdcbd 100644
--- a/models/migrations/v1_16/v197.go
+++ b/models/migrations/v1_16/v197.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v198.go b/models/migrations/v1_16/v198.go
index 115bb313a0..f35ede138a 100644
--- a/models/migrations/v1_16/v198.go
+++ b/models/migrations/v1_16/v198.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v199.go b/models/migrations/v1_16/v199.go
index 6adcf890af..4020352f2b 100644
--- a/models/migrations/v1_16/v199.go
+++ b/models/migrations/v1_16/v199.go
@@ -1,6 +1,6 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
// We used to use a table `remote_version` to store information for updater, now we use `AppState`, so this migration task is a no-op now.
diff --git a/models/migrations/v1_16/v200.go b/models/migrations/v1_16/v200.go
index c08c20e51d..de57fad8fe 100644
--- a/models/migrations/v1_16/v200.go
+++ b/models/migrations/v1_16/v200.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v201.go b/models/migrations/v1_16/v201.go
index 35e0c9f2fb..2c43698b0c 100644
--- a/models/migrations/v1_16/v201.go
+++ b/models/migrations/v1_16/v201.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v202.go b/models/migrations/v1_16/v202.go
index 6ba36152f1..d8c8fdcadc 100644
--- a/models/migrations/v1_16/v202.go
+++ b/models/migrations/v1_16/v202.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v203.go b/models/migrations/v1_16/v203.go
index e8e6b52453..c3241cba57 100644
--- a/models/migrations/v1_16/v203.go
+++ b/models/migrations/v1_16/v203.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v204.go b/models/migrations/v1_16/v204.go
index ece03e1305..4d375307e7 100644
--- a/models/migrations/v1_16/v204.go
+++ b/models/migrations/v1_16/v204.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import "xorm.io/xorm"
diff --git a/models/migrations/v1_16/v205.go b/models/migrations/v1_16/v205.go
index d6c577083c..78241bad5b 100644
--- a/models/migrations/v1_16/v205.go
+++ b/models/migrations/v1_16/v205.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_16/v206.go b/models/migrations/v1_16/v206.go
index 581a7d76e9..01a9c386eb 100644
--- a/models/migrations/v1_16/v206.go
+++ b/models/migrations/v1_16/v206.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"fmt"
diff --git a/models/migrations/v1_16/v207.go b/models/migrations/v1_16/v207.go
index 91208f066c..19126ead1f 100644
--- a/models/migrations/v1_16/v207.go
+++ b/models/migrations/v1_16/v207.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v208.go b/models/migrations/v1_16/v208.go
index 1a11ef096a..fb643324f4 100644
--- a/models/migrations/v1_16/v208.go
+++ b/models/migrations/v1_16/v208.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v209.go b/models/migrations/v1_16/v209.go
index be3100e02a..230838647b 100644
--- a/models/migrations/v1_16/v209.go
+++ b/models/migrations/v1_16/v209.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_16/v210.go b/models/migrations/v1_16/v210.go
index 51b7d81e99..0b94baf8e3 100644
--- a/models/migrations/v1_16/v210.go
+++ b/models/migrations/v1_16/v210.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"encoding/base32"
diff --git a/models/migrations/v1_16/v210_test.go b/models/migrations/v1_16/v210_test.go
index d43fb03106..3b4ac7aa4b 100644
--- a/models/migrations/v1_16/v210_test.go
+++ b/models/migrations/v1_16/v210_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_16 //nolint
+package v1_16
import (
"testing"
@@ -71,5 +71,5 @@ func Test_RemigrateU2FCredentials(t *testing.T) {
return
}
- assert.EqualValues(t, expected, got)
+ assert.Equal(t, expected, got)
}
diff --git a/models/migrations/v1_17/main_test.go b/models/migrations/v1_17/main_test.go
index 79cb3fa078..571a4f55a3 100644
--- a/models/migrations/v1_17/main_test.go
+++ b/models/migrations/v1_17/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"testing"
diff --git a/models/migrations/v1_17/v211.go b/models/migrations/v1_17/v211.go
index 9b72c8610b..517cf19388 100644
--- a/models/migrations/v1_17/v211.go
+++ b/models/migrations/v1_17/v211.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_17/v212.go b/models/migrations/v1_17/v212.go
index e3f9437121..788792211f 100644
--- a/models/migrations/v1_17/v212.go
+++ b/models/migrations/v1_17/v212.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_17/v213.go b/models/migrations/v1_17/v213.go
index bb3f466e52..b2bbdf7279 100644
--- a/models/migrations/v1_17/v213.go
+++ b/models/migrations/v1_17/v213.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_17/v214.go b/models/migrations/v1_17/v214.go
index 2268164919..1925324f0f 100644
--- a/models/migrations/v1_17/v214.go
+++ b/models/migrations/v1_17/v214.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_17/v215.go b/models/migrations/v1_17/v215.go
index b338f85417..748539225d 100644
--- a/models/migrations/v1_17/v215.go
+++ b/models/migrations/v1_17/v215.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"code.gitea.io/gitea/models/pull"
diff --git a/models/migrations/v1_17/v216.go b/models/migrations/v1_17/v216.go
index 268f472a42..37aeacb6fc 100644
--- a/models/migrations/v1_17/v216.go
+++ b/models/migrations/v1_17/v216.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
// This migration added non-ideal indices to the action table, which slowed things down on larger datasets;
// it has been superseded by v218.go
diff --git a/models/migrations/v1_17/v217.go b/models/migrations/v1_17/v217.go
index 3f970b68a5..04626bcbc5 100644
--- a/models/migrations/v1_17/v217.go
+++ b/models/migrations/v1_17/v217.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_17/v218.go b/models/migrations/v1_17/v218.go
index 4c05a9b539..17d4cd89d4 100644
--- a/models/migrations/v1_17/v218.go
+++ b/models/migrations/v1_17/v218.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_17/v219.go b/models/migrations/v1_17/v219.go
index d266029fd9..6e335cb813 100644
--- a/models/migrations/v1_17/v219.go
+++ b/models/migrations/v1_17/v219.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"time"
diff --git a/models/migrations/v1_17/v220.go b/models/migrations/v1_17/v220.go
index 904ddc5192..4ac8c58e1e 100644
--- a/models/migrations/v1_17/v220.go
+++ b/models/migrations/v1_17/v220.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
packages_model "code.gitea.io/gitea/models/packages"
diff --git a/models/migrations/v1_17/v221.go b/models/migrations/v1_17/v221.go
index 9e159388bd..9e6a67eb18 100644
--- a/models/migrations/v1_17/v221.go
+++ b/models/migrations/v1_17/v221.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"encoding/base32"
diff --git a/models/migrations/v1_17/v221_test.go b/models/migrations/v1_17/v221_test.go
index 9ca54142e2..a2dc0fae55 100644
--- a/models/migrations/v1_17/v221_test.go
+++ b/models/migrations/v1_17/v221_test.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"encoding/base32"
diff --git a/models/migrations/v1_17/v222.go b/models/migrations/v1_17/v222.go
index 2ffb94eb1c..a5ea537d8a 100644
--- a/models/migrations/v1_17/v222.go
+++ b/models/migrations/v1_17/v222.go
@@ -1,10 +1,11 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"context"
+ "errors"
"fmt"
"code.gitea.io/gitea/models/migrations/base"
@@ -29,7 +30,7 @@ func DropOldCredentialIDColumn(x *xorm.Engine) error {
}
if !credentialIDBytesExists {
// looks like 221 hasn't properly run
- return fmt.Errorf("webauthn_credential does not have a credential_id_bytes column... it is not safe to run this migration")
+ return errors.New("webauthn_credential does not have a credential_id_bytes column... it is not safe to run this migration")
}
// Create webauthnCredential table
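Aside, not part of the patch: the hunk above replaces fmt.Errorf with errors.New because the message contains no formatting verbs, so the fmt machinery adds nothing; linters commonly flag this pattern. A minimal sketch of the same idea:

package main

import (
	"errors"
	"fmt"
)

// checkColumn is illustrative only; the real migration inspects the live schema.
func checkColumn(credentialIDBytesExists bool) error {
	if !credentialIDBytesExists {
		// No formatting verbs, so errors.New is sufficient.
		return errors.New("webauthn_credential does not have a credential_id_bytes column... it is not safe to run this migration")
	}
	return nil
}

func main() {
	fmt.Println(checkColumn(false))
}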
diff --git a/models/migrations/v1_17/v223.go b/models/migrations/v1_17/v223.go
index 018451ee4c..b2bfb76551 100644
--- a/models/migrations/v1_17/v223.go
+++ b/models/migrations/v1_17/v223.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_17 //nolint
+package v1_17
import (
"context"
diff --git a/models/migrations/v1_18/main_test.go b/models/migrations/v1_18/main_test.go
index f71a21d1fb..ebcfb45a94 100644
--- a/models/migrations/v1_18/main_test.go
+++ b/models/migrations/v1_18/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"testing"
diff --git a/models/migrations/v1_18/v224.go b/models/migrations/v1_18/v224.go
index f3d522b91a..6dc12020ea 100644
--- a/models/migrations/v1_18/v224.go
+++ b/models/migrations/v1_18/v224.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_18/v225.go b/models/migrations/v1_18/v225.go
index b0ac3777fc..bc6117e38f 100644
--- a/models/migrations/v1_18/v225.go
+++ b/models/migrations/v1_18/v225.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_18/v226.go b/models/migrations/v1_18/v226.go
index f87e24b11d..8ed9761476 100644
--- a/models/migrations/v1_18/v226.go
+++ b/models/migrations/v1_18/v226.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"xorm.io/builder"
diff --git a/models/migrations/v1_18/v227.go b/models/migrations/v1_18/v227.go
index 5fe5dcd0c9..3aca686d59 100644
--- a/models/migrations/v1_18/v227.go
+++ b/models/migrations/v1_18/v227.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_18/v228.go b/models/migrations/v1_18/v228.go
index 3e7a36de15..b13f6461bd 100644
--- a/models/migrations/v1_18/v228.go
+++ b/models/migrations/v1_18/v228.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_18/v229.go b/models/migrations/v1_18/v229.go
index 10d9f35097..bc15e01390 100644
--- a/models/migrations/v1_18/v229.go
+++ b/models/migrations/v1_18/v229.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"fmt"
diff --git a/models/migrations/v1_18/v229_test.go b/models/migrations/v1_18/v229_test.go
index d489328c00..5722dd3557 100644
--- a/models/migrations/v1_18/v229_test.go
+++ b/models/migrations/v1_18/v229_test.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"testing"
diff --git a/models/migrations/v1_18/v230.go b/models/migrations/v1_18/v230.go
index ea5b4d02e1..078fce7643 100644
--- a/models/migrations/v1_18/v230.go
+++ b/models/migrations/v1_18/v230.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_18/v230_test.go b/models/migrations/v1_18/v230_test.go
index 40db4c2ffe..25b2f6525d 100644
--- a/models/migrations/v1_18/v230_test.go
+++ b/models/migrations/v1_18/v230_test.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_18 //nolint
+package v1_18
import (
"testing"
diff --git a/models/migrations/v1_19/main_test.go b/models/migrations/v1_19/main_test.go
index 59f42af111..87e807be6e 100644
--- a/models/migrations/v1_19/main_test.go
+++ b/models/migrations/v1_19/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"testing"
diff --git a/models/migrations/v1_19/v231.go b/models/migrations/v1_19/v231.go
index 79e46132f0..8ef1e4e743 100644
--- a/models/migrations/v1_19/v231.go
+++ b/models/migrations/v1_19/v231.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_19/v232.go b/models/migrations/v1_19/v232.go
index 9caf587c1e..493dbc6df8 100644
--- a/models/migrations/v1_19/v232.go
+++ b/models/migrations/v1_19/v232.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_19/v233.go b/models/migrations/v1_19/v233.go
index ba4cd8e20b..9eb6d40509 100644
--- a/models/migrations/v1_19/v233.go
+++ b/models/migrations/v1_19/v233.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"fmt"
diff --git a/models/migrations/v1_19/v233_test.go b/models/migrations/v1_19/v233_test.go
index 32c10ab0f4..7436ff7483 100644
--- a/models/migrations/v1_19/v233_test.go
+++ b/models/migrations/v1_19/v233_test.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"testing"
@@ -64,7 +64,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) {
assert.Equal(t, e.Meta, got[i].Meta)
if e.HeaderAuthorization == "" {
- assert.Equal(t, "", got[i].HeaderAuthorizationEncrypted)
+ assert.Empty(t, got[i].HeaderAuthorizationEncrypted)
} else {
cipherhex := got[i].HeaderAuthorizationEncrypted
cleartext, err := secret.DecryptSecret(setting.SecretKey, cipherhex)
diff --git a/models/migrations/v1_19/v234.go b/models/migrations/v1_19/v234.go
index 728a580807..3475384d6f 100644
--- a/models/migrations/v1_19/v234.go
+++ b/models/migrations/v1_19/v234.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_19/v235.go b/models/migrations/v1_19/v235.go
index 3715de3920..297d90f65a 100644
--- a/models/migrations/v1_19/v235.go
+++ b/models/migrations/v1_19/v235.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_19/v236.go b/models/migrations/v1_19/v236.go
index f172a85b1f..0ed4d97a27 100644
--- a/models/migrations/v1_19/v236.go
+++ b/models/migrations/v1_19/v236.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_19/v237.go b/models/migrations/v1_19/v237.go
index b23c765aa5..cf30226ccd 100644
--- a/models/migrations/v1_19/v237.go
+++ b/models/migrations/v1_19/v237.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_19/v238.go b/models/migrations/v1_19/v238.go
index 266e6cea58..de681bfc7a 100644
--- a/models/migrations/v1_19/v238.go
+++ b/models/migrations/v1_19/v238.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_19/v239.go b/models/migrations/v1_19/v239.go
index 10076f2401..8f4a65be95 100644
--- a/models/migrations/v1_19/v239.go
+++ b/models/migrations/v1_19/v239.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_19/v240.go b/models/migrations/v1_19/v240.go
index 4505f86299..7fdbaeb9dc 100644
--- a/models/migrations/v1_19/v240.go
+++ b/models/migrations/v1_19/v240.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/models/db"
diff --git a/models/migrations/v1_19/v241.go b/models/migrations/v1_19/v241.go
index a617d6fd2f..e35801a057 100644
--- a/models/migrations/v1_19/v241.go
+++ b/models/migrations/v1_19/v241.go
@@ -1,7 +1,7 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_19/v242.go b/models/migrations/v1_19/v242.go
index 4470835214..e9e759eaaa 100644
--- a/models/migrations/v1_19/v242.go
+++ b/models/migrations/v1_19/v242.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_19/v243.go b/models/migrations/v1_19/v243.go
index 55bbfafb2f..9c3f372594 100644
--- a/models/migrations/v1_19/v243.go
+++ b/models/migrations/v1_19/v243.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_19 //nolint
+package v1_19
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/main_test.go b/models/migrations/v1_20/main_test.go
index 92a1a9f622..2fd63a7118 100644
--- a/models/migrations/v1_20/main_test.go
+++ b/models/migrations/v1_20/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"testing"
diff --git a/models/migrations/v1_20/v244.go b/models/migrations/v1_20/v244.go
index 977566ad7d..76cdccaca5 100644
--- a/models/migrations/v1_20/v244.go
+++ b/models/migrations/v1_20/v244.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/v245.go b/models/migrations/v1_20/v245.go
index ab58d12880..4acb11416c 100644
--- a/models/migrations/v1_20/v245.go
+++ b/models/migrations/v1_20/v245.go
@@ -1,11 +1,10 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"context"
- "fmt"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/modules/setting"
@@ -57,7 +56,7 @@ func RenameWebhookOrgToOwner(x *xorm.Engine) error {
return err
}
sqlType := x.Dialect().SQLType(inferredTable.GetColumn("org_id"))
- if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `webhook` CHANGE org_id owner_id %s", sqlType)); err != nil {
+ if _, err := sess.Exec("ALTER TABLE `webhook` CHANGE org_id owner_id " + sqlType); err != nil {
return err
}
case setting.Database.Type.IsMSSQL():
diff --git a/models/migrations/v1_20/v246.go b/models/migrations/v1_20/v246.go
index e6340ef079..22bf723404 100644
--- a/models/migrations/v1_20/v246.go
+++ b/models/migrations/v1_20/v246.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/v247.go b/models/migrations/v1_20/v247.go
index 59fc5c46b5..4f82937e18 100644
--- a/models/migrations/v1_20/v247.go
+++ b/models/migrations/v1_20/v247.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_20/v248.go b/models/migrations/v1_20/v248.go
index 40555210e7..4f2091e4bc 100644
--- a/models/migrations/v1_20/v248.go
+++ b/models/migrations/v1_20/v248.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import "xorm.io/xorm"
diff --git a/models/migrations/v1_20/v249.go b/models/migrations/v1_20/v249.go
index 02951a74d6..c6d3a177ca 100644
--- a/models/migrations/v1_20/v249.go
+++ b/models/migrations/v1_20/v249.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_20/v250.go b/models/migrations/v1_20/v250.go
index 86388ef0b8..ec45e6e5c3 100644
--- a/models/migrations/v1_20/v250.go
+++ b/models/migrations/v1_20/v250.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"strings"
diff --git a/models/migrations/v1_20/v251.go b/models/migrations/v1_20/v251.go
index 7743248a3f..a274c22a73 100644
--- a/models/migrations/v1_20/v251.go
+++ b/models/migrations/v1_20/v251.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_20/v252.go b/models/migrations/v1_20/v252.go
index ab61cd9b8b..d6aa602753 100644
--- a/models/migrations/v1_20/v252.go
+++ b/models/migrations/v1_20/v252.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_20/v253.go b/models/migrations/v1_20/v253.go
index 96c494bd8d..c96454dbf9 100644
--- a/models/migrations/v1_20/v253.go
+++ b/models/migrations/v1_20/v253.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/log"
diff --git a/models/migrations/v1_20/v254.go b/models/migrations/v1_20/v254.go
index 1e26979a5b..9cdbfb3916 100644
--- a/models/migrations/v1_20/v254.go
+++ b/models/migrations/v1_20/v254.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/v255.go b/models/migrations/v1_20/v255.go
index 14b70f8f96..caf198700e 100644
--- a/models/migrations/v1_20/v255.go
+++ b/models/migrations/v1_20/v255.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_20/v256.go b/models/migrations/v1_20/v256.go
index 822153b93e..7b84c1e154 100644
--- a/models/migrations/v1_20/v256.go
+++ b/models/migrations/v1_20/v256.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/v257.go b/models/migrations/v1_20/v257.go
index 6c6ca4c748..9d5f7c07df 100644
--- a/models/migrations/v1_20/v257.go
+++ b/models/migrations/v1_20/v257.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_20/v258.go b/models/migrations/v1_20/v258.go
index 47174ce805..1d3faffdae 100644
--- a/models/migrations/v1_20/v258.go
+++ b/models/migrations/v1_20/v258.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_20/v259.go b/models/migrations/v1_20/v259.go
index 5b8ced4ad7..9e0dc9b61d 100644
--- a/models/migrations/v1_20/v259.go
+++ b/models/migrations/v1_20/v259.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"fmt"
@@ -329,7 +329,7 @@ func ConvertScopedAccessTokens(x *xorm.Engine) error {
for _, token := range tokens {
var scopes []string
allNewScopesMap := make(map[AccessTokenScope]bool)
- for _, oldScope := range strings.Split(token.Scope, ",") {
+ for oldScope := range strings.SplitSeq(token.Scope, ",") {
if newScopes, exists := accessTokenScopeMap[OldAccessTokenScope(oldScope)]; exists {
for _, newScope := range newScopes {
allNewScopesMap[newScope] = true
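Aside, not part of the patch: strings.SplitSeq (new in Go 1.24) returns an iterator over the substrings instead of allocating a []string, which is why the loop above drops the index variable. A standalone sketch, assuming Go 1.24 or later:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// SplitSeq yields each comma-separated scope lazily; no intermediate slice is built.
	for oldScope := range strings.SplitSeq("repo,admin:org,notification", ",") {
		fmt.Println(oldScope)
	}
}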
diff --git a/models/migrations/v1_20/v259_test.go b/models/migrations/v1_20/v259_test.go
index 5bc9a71391..0bf63719e5 100644
--- a/models/migrations/v1_20/v259_test.go
+++ b/models/migrations/v1_20/v259_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_20 //nolint
+package v1_20
import (
"sort"
@@ -96,7 +96,7 @@ func Test_ConvertScopedAccessTokens(t *testing.T) {
tokens := make([]AccessToken, 0)
err = x.Find(&tokens)
assert.NoError(t, err)
- assert.Equal(t, len(tests), len(tokens))
+ assert.Len(t, tokens, len(tests))
// sort the tokens (insertion order by auto-incrementing primary key)
sort.Slice(tokens, func(i, j int) bool {
diff --git a/models/migrations/v1_21/main_test.go b/models/migrations/v1_21/main_test.go
index 9afdea1677..536a7ade08 100644
--- a/models/migrations/v1_21/main_test.go
+++ b/models/migrations/v1_21/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"testing"
diff --git a/models/migrations/v1_21/v260.go b/models/migrations/v1_21/v260.go
index 6ca52c5998..8540c58ae8 100644
--- a/models/migrations/v1_21/v260.go
+++ b/models/migrations/v1_21/v260.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_21/v261.go b/models/migrations/v1_21/v261.go
index 4ec1160d0b..122b98eb93 100644
--- a/models/migrations/v1_21/v261.go
+++ b/models/migrations/v1_21/v261.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_21/v262.go b/models/migrations/v1_21/v262.go
index 23e900572a..6e88e29b9d 100644
--- a/models/migrations/v1_21/v262.go
+++ b/models/migrations/v1_21/v262.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v263.go b/models/migrations/v1_21/v263.go
index 2c7cbadf0d..55c418bde0 100644
--- a/models/migrations/v1_21/v263.go
+++ b/models/migrations/v1_21/v263.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"fmt"
diff --git a/models/migrations/v1_21/v264.go b/models/migrations/v1_21/v264.go
index e81a17ad6d..7fc0ec6024 100644
--- a/models/migrations/v1_21/v264.go
+++ b/models/migrations/v1_21/v264.go
@@ -1,11 +1,11 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"context"
- "fmt"
+ "errors"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/timeutil"
@@ -57,7 +57,7 @@ func AddBranchTable(x *xorm.Engine) error {
if err != nil {
return err
} else if !has {
- return fmt.Errorf("no admin user found")
+ return errors.New("no admin user found")
}
branches := make([]Branch, 0, 100)
diff --git a/models/migrations/v1_21/v265.go b/models/migrations/v1_21/v265.go
index 800eb95f72..b6892acc27 100644
--- a/models/migrations/v1_21/v265.go
+++ b/models/migrations/v1_21/v265.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v266.go b/models/migrations/v1_21/v266.go
index 79a5f5e14c..440549e868 100644
--- a/models/migrations/v1_21/v266.go
+++ b/models/migrations/v1_21/v266.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v267.go b/models/migrations/v1_21/v267.go
index bc0e954bdc..394139a17e 100644
--- a/models/migrations/v1_21/v267.go
+++ b/models/migrations/v1_21/v267.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_21/v268.go b/models/migrations/v1_21/v268.go
index 332793ff07..b677d2383e 100644
--- a/models/migrations/v1_21/v268.go
+++ b/models/migrations/v1_21/v268.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v269.go b/models/migrations/v1_21/v269.go
index 475ec02380..042040927d 100644
--- a/models/migrations/v1_21/v269.go
+++ b/models/migrations/v1_21/v269.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v270.go b/models/migrations/v1_21/v270.go
index b9cc84d3ac..ab7c5660ba 100644
--- a/models/migrations/v1_21/v270.go
+++ b/models/migrations/v1_21/v270.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v271.go b/models/migrations/v1_21/v271.go
index 098f6499d5..05e1af1351 100644
--- a/models/migrations/v1_21/v271.go
+++ b/models/migrations/v1_21/v271.go
@@ -1,7 +1,8 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
+
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_21/v272.go b/models/migrations/v1_21/v272.go
index a729c49f1b..14c1e0c4b0 100644
--- a/models/migrations/v1_21/v272.go
+++ b/models/migrations/v1_21/v272.go
@@ -1,7 +1,8 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
+
import (
"xorm.io/xorm"
)
diff --git a/models/migrations/v1_21/v273.go b/models/migrations/v1_21/v273.go
index 61c79f4a76..e614a56a7d 100644
--- a/models/migrations/v1_21/v273.go
+++ b/models/migrations/v1_21/v273.go
@@ -1,7 +1,8 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
+
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_21/v274.go b/models/migrations/v1_21/v274.go
index df5994f159..d0b557a151 100644
--- a/models/migrations/v1_21/v274.go
+++ b/models/migrations/v1_21/v274.go
@@ -1,7 +1,8 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
+
import (
"time"
diff --git a/models/migrations/v1_21/v275.go b/models/migrations/v1_21/v275.go
index 78804a59d6..2bfe5c72fa 100644
--- a/models/migrations/v1_21/v275.go
+++ b/models/migrations/v1_21/v275.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v276.go b/models/migrations/v1_21/v276.go
index 9d22c9052e..3ab7e22cd0 100644
--- a/models/migrations/v1_21/v276.go
+++ b/models/migrations/v1_21/v276.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"context"
diff --git a/models/migrations/v1_21/v277.go b/models/migrations/v1_21/v277.go
index 12529160b7..0c102eddde 100644
--- a/models/migrations/v1_21/v277.go
+++ b/models/migrations/v1_21/v277.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v278.go b/models/migrations/v1_21/v278.go
index d6a462d1e7..846f228678 100644
--- a/models/migrations/v1_21/v278.go
+++ b/models/migrations/v1_21/v278.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_21/v279.go b/models/migrations/v1_21/v279.go
index 2abd1bbe84..beb39effe1 100644
--- a/models/migrations/v1_21/v279.go
+++ b/models/migrations/v1_21/v279.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_21 //nolint
+package v1_21
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_22/main_test.go b/models/migrations/v1_22/main_test.go
index efd8dbaa8c..ac8facd6aa 100644
--- a/models/migrations/v1_22/main_test.go
+++ b/models/migrations/v1_22/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"testing"
diff --git a/models/migrations/v1_22/v280.go b/models/migrations/v1_22/v280.go
index a8ee4a3bf7..2271cb6089 100644
--- a/models/migrations/v1_22/v280.go
+++ b/models/migrations/v1_22/v280.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_22/v281.go b/models/migrations/v1_22/v281.go
index fc1866aa83..129ec2cba0 100644
--- a/models/migrations/v1_22/v281.go
+++ b/models/migrations/v1_22/v281.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_22/v282.go b/models/migrations/v1_22/v282.go
index baad9e0916..eed64c30f7 100644
--- a/models/migrations/v1_22/v282.go
+++ b/models/migrations/v1_22/v282.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_22/v283.go b/models/migrations/v1_22/v283.go
index 0a45c51245..0eca031b65 100644
--- a/models/migrations/v1_22/v283.go
+++ b/models/migrations/v1_22/v283.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"fmt"
diff --git a/models/migrations/v1_22/v283_test.go b/models/migrations/v1_22/v283_test.go
index e89a7cbfc2..743f860466 100644
--- a/models/migrations/v1_22/v283_test.go
+++ b/models/migrations/v1_22/v283_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"testing"
diff --git a/models/migrations/v1_22/v284.go b/models/migrations/v1_22/v284.go
index 2b95078980..31b38f6aed 100644
--- a/models/migrations/v1_22/v284.go
+++ b/models/migrations/v1_22/v284.go
@@ -1,7 +1,8 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
+
import (
"xorm.io/xorm"
)
diff --git a/models/migrations/v1_22/v285.go b/models/migrations/v1_22/v285.go
index a55cc17c04..fed89f670e 100644
--- a/models/migrations/v1_22/v285.go
+++ b/models/migrations/v1_22/v285.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"time"
diff --git a/models/migrations/v1_22/v286.go b/models/migrations/v1_22/v286.go
index 1fcde33202..f3ba50dbb6 100644
--- a/models/migrations/v1_22/v286.go
+++ b/models/migrations/v1_22/v286.go
@@ -1,6 +1,6 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"errors"
diff --git a/models/migrations/v1_22/v286_test.go b/models/migrations/v1_22/v286_test.go
index 1f213ddb6e..b4a50f6fcb 100644
--- a/models/migrations/v1_22/v286_test.go
+++ b/models/migrations/v1_22/v286_test.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"testing"
@@ -108,11 +108,11 @@ func Test_RepositoryFormat(t *testing.T) {
ok, err := x.ID(2).Get(repo)
assert.NoError(t, err)
assert.True(t, ok)
- assert.EqualValues(t, "sha1", repo.ObjectFormatName)
+ assert.Equal(t, "sha1", repo.ObjectFormatName)
repo = new(Repository)
ok, err = x.ID(id).Get(repo)
assert.NoError(t, err)
assert.True(t, ok)
- assert.EqualValues(t, "sha256", repo.ObjectFormatName)
+ assert.Equal(t, "sha256", repo.ObjectFormatName)
}
diff --git a/models/migrations/v1_22/v287.go b/models/migrations/v1_22/v287.go
index c8b1593286..5fd901f9de 100644
--- a/models/migrations/v1_22/v287.go
+++ b/models/migrations/v1_22/v287.go
@@ -1,7 +1,7 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_22/v287_test.go b/models/migrations/v1_22/v287_test.go
index 9c7b10947d..2b42a33c38 100644
--- a/models/migrations/v1_22/v287_test.go
+++ b/models/migrations/v1_22/v287_test.go
@@ -1,10 +1,10 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
- "fmt"
+ "strconv"
"testing"
"code.gitea.io/gitea/models/migrations/base"
@@ -50,7 +50,7 @@ func Test_UpdateBadgeColName(t *testing.T) {
for i, e := range oldBadges {
got := got[i+1] // 1 is in the badge.yml
assert.Equal(t, e.ID, got.ID)
- assert.Equal(t, fmt.Sprintf("%d", e.ID), got.Slug)
+ assert.Equal(t, strconv.FormatInt(e.ID, 10), got.Slug)
}
// TODO: check if badges have been updated
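Aside, not part of the patch: strconv.FormatInt renders an int64 in base 10 directly, avoiding fmt's reflection-based formatting for this simple case. Both calls below produce the same string:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	var id int64 = 42

	slugA := fmt.Sprintf("%d", id)
	slugB := strconv.FormatInt(id, 10)

	fmt.Println(slugA == slugB) // true
}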
diff --git a/models/migrations/v1_22/v288.go b/models/migrations/v1_22/v288.go
index 7c93bfcc66..26c850c218 100644
--- a/models/migrations/v1_22/v288.go
+++ b/models/migrations/v1_22/v288.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_22/v289.go b/models/migrations/v1_22/v289.go
index b9941aadd9..78689a4ffa 100644
--- a/models/migrations/v1_22/v289.go
+++ b/models/migrations/v1_22/v289.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import "xorm.io/xorm"
diff --git a/models/migrations/v1_22/v290.go b/models/migrations/v1_22/v290.go
index 9c54d4e87c..0f4d78410c 100644
--- a/models/migrations/v1_22/v290.go
+++ b/models/migrations/v1_22/v290.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_22/v291.go b/models/migrations/v1_22/v291.go
index 74726fae96..823a644a95 100644
--- a/models/migrations/v1_22/v291.go
+++ b/models/migrations/v1_22/v291.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import "xorm.io/xorm"
diff --git a/models/migrations/v1_22/v292.go b/models/migrations/v1_22/v292.go
index beca556aee..440f48ce80 100644
--- a/models/migrations/v1_22/v292.go
+++ b/models/migrations/v1_22/v292.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
// NOTE: noop; the original migration had a bug that caused some projects to be skipped, so
// these projects will have no default board.
diff --git a/models/migrations/v1_22/v293.go b/models/migrations/v1_22/v293.go
index 53cc719294..5299b8618f 100644
--- a/models/migrations/v1_22/v293.go
+++ b/models/migrations/v1_22/v293.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_22/v293_test.go b/models/migrations/v1_22/v293_test.go
index cfe4345143..2c8f7683a8 100644
--- a/models/migrations/v1_22/v293_test.go
+++ b/models/migrations/v1_22/v293_test.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"testing"
diff --git a/models/migrations/v1_22/v294.go b/models/migrations/v1_22/v294.go
index 20e261fb1b..8776e51a16 100644
--- a/models/migrations/v1_22/v294.go
+++ b/models/migrations/v1_22/v294.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"fmt"
diff --git a/models/migrations/v1_22/v294_test.go b/models/migrations/v1_22/v294_test.go
index a1d702cb77..1cf03d6120 100644
--- a/models/migrations/v1_22/v294_test.go
+++ b/models/migrations/v1_22/v294_test.go
@@ -1,10 +1,9 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
- "slices"
"testing"
"code.gitea.io/gitea/models/migrations/base"
@@ -44,7 +43,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) {
for _, index := range tables[0].Indexes {
if index.Type == schemas.UniqueType {
found = true
- slices.Equal(index.Cols, []string{"project_id", "issue_id"})
+ assert.ElementsMatch(t, index.Cols, []string{"project_id", "issue_id"})
break
}
}
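Aside, not part of the patch: the removed line called slices.Equal but discarded its result, so nothing was actually asserted; assert.ElementsMatch performs a real assertion and ignores element order. A minimal illustrative test, assuming github.com/stretchr/testify/assert:

package v1_22

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestUniqueIndexColumns(t *testing.T) {
	indexCols := []string{"issue_id", "project_id"}

	// Fails the test if the two slices do not contain the same elements, in any order.
	assert.ElementsMatch(t, indexCols, []string{"project_id", "issue_id"})
}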
diff --git a/models/migrations/v1_22/v295.go b/models/migrations/v1_22/v295.go
index 17bdadb4ad..319b1a399b 100644
--- a/models/migrations/v1_22/v295.go
+++ b/models/migrations/v1_22/v295.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import "xorm.io/xorm"
diff --git a/models/migrations/v1_22/v296.go b/models/migrations/v1_22/v296.go
index 1ecacab95f..75350f9f65 100644
--- a/models/migrations/v1_22/v296.go
+++ b/models/migrations/v1_22/v296.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import "xorm.io/xorm"
diff --git a/models/migrations/v1_22/v297.go b/models/migrations/v1_22/v297.go
index 7d4b506925..9a4405f266 100644
--- a/models/migrations/v1_22/v297.go
+++ b/models/migrations/v1_22/v297.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import (
"code.gitea.io/gitea/models/perm"
diff --git a/models/migrations/v1_22/v298.go b/models/migrations/v1_22/v298.go
index b9f3b95ade..7700173a00 100644
--- a/models/migrations/v1_22/v298.go
+++ b/models/migrations/v1_22/v298.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_22 //nolint
+package v1_22
import "xorm.io/xorm"
diff --git a/models/migrations/v1_23/main_test.go b/models/migrations/v1_23/main_test.go
index b7948bd4dd..f7b2caed83 100644
--- a/models/migrations/v1_23/main_test.go
+++ b/models/migrations/v1_23/main_test.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"testing"
diff --git a/models/migrations/v1_23/v299.go b/models/migrations/v1_23/v299.go
index f6db960c3b..11021d8855 100644
--- a/models/migrations/v1_23/v299.go
+++ b/models/migrations/v1_23/v299.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import "xorm.io/xorm"
@@ -14,5 +14,9 @@ func AddContentVersionToIssueAndComment(x *xorm.Engine) error {
ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
}
- return x.Sync(new(Comment), new(Issue))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(Comment), new(Issue))
+ return err
}
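The change above is the template for most migrations in this patch: instead of a plain x.Sync, which may also try to (re)create constraints and indices, the new column is synced with options that leave those existing schema objects alone. A minimal sketch of that pattern, assuming only the xorm.SyncOptions fields visible in this diff:

package v1_23

import "xorm.io/xorm"

// addColumnOnly is a hypothetical migration used only to illustrate the pattern:
// sync a single new column while leaving existing constraints and indices alone.
func addColumnOnly(x *xorm.Engine) error {
	type Issue struct {
		ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
	}
	_, err := x.SyncWithOptions(xorm.SyncOptions{
		IgnoreConstrains: true, // keep existing foreign keys / constraints untouched
		IgnoreIndices:    true, // do not create or drop indices during this sync
	}, new(Issue))
	return err
}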
diff --git a/models/migrations/v1_23/v300.go b/models/migrations/v1_23/v300.go
index f1f1cccdbf..13c6489c5e 100644
--- a/models/migrations/v1_23/v300.go
+++ b/models/migrations/v1_23/v300.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import "xorm.io/xorm"
@@ -13,5 +13,9 @@ func AddForcePushBranchProtection(x *xorm.Engine) error {
ForcePushAllowlistTeamIDs []int64 `xorm:"JSON TEXT"`
ForcePushAllowlistDeployKeys bool `xorm:"NOT NULL DEFAULT false"`
}
- return x.Sync(new(ProtectedBranch))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(ProtectedBranch))
+ return err
}
diff --git a/models/migrations/v1_23/v301.go b/models/migrations/v1_23/v301.go
index b7797f6c6b..ed8e9ef059 100644
--- a/models/migrations/v1_23/v301.go
+++ b/models/migrations/v1_23/v301.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import "xorm.io/xorm"
@@ -10,5 +10,9 @@ func AddSkipSecondaryAuthColumnToOAuth2ApplicationTable(x *xorm.Engine) error {
type oauth2Application struct {
SkipSecondaryAuthorization bool `xorm:"NOT NULL DEFAULT FALSE"`
}
- return x.Sync(new(oauth2Application))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(oauth2Application))
+ return err
}
diff --git a/models/migrations/v1_23/v302.go b/models/migrations/v1_23/v302.go
index d7ea03eb3d..e4a50b3ec8 100644
--- a/models/migrations/v1_23/v302.go
+++ b/models/migrations/v1_23/v302.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"code.gitea.io/gitea/modules/timeutil"
@@ -14,5 +14,8 @@ func AddIndexToActionTaskStoppedLogExpired(x *xorm.Engine) error {
Stopped timeutil.TimeStamp `xorm:"index(stopped_log_expired)"`
LogExpired bool `xorm:"index(stopped_log_expired)"`
}
- return x.Sync(new(ActionTask))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreDropIndices: true,
+ }, new(ActionTask))
+ return err
}
diff --git a/models/migrations/v1_23/v302_test.go b/models/migrations/v1_23/v302_test.go
new file mode 100644
index 0000000000..b008b6fc03
--- /dev/null
+++ b/models/migrations/v1_23/v302_test.go
@@ -0,0 +1,51 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_23
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_AddIndexToActionTaskStoppedLogExpired(t *testing.T) {
+ type ActionTask struct {
+ ID int64
+ JobID int64
+ Attempt int64
+ RunnerID int64 `xorm:"index"`
+ Status int `xorm:"index"`
+ Started timeutil.TimeStamp `xorm:"index"`
+ Stopped timeutil.TimeStamp `xorm:"index(stopped_log_expired)"`
+
+ RepoID int64 `xorm:"index"`
+ OwnerID int64 `xorm:"index"`
+ CommitSHA string `xorm:"index"`
+ IsForkPullRequest bool
+
+ Token string `xorm:"-"`
+ TokenHash string `xorm:"UNIQUE"` // sha256 of token
+ TokenSalt string
+ TokenLastEight string `xorm:"index token_last_eight"`
+
+ LogFilename string // file name of log
+ LogInStorage bool // read log from database or from storage
+ LogLength int64 // lines count
+ LogSize int64 // blob size
+ LogIndexes []int64 `xorm:"LONGBLOB"` // line number to offset
+ LogExpired bool `xorm:"index(stopped_log_expired)"` // files that are too old will be deleted
+
+ Created timeutil.TimeStamp `xorm:"created"`
+ Updated timeutil.TimeStamp `xorm:"updated index"`
+ }
+
+ // Prepare and load the testing database
+ x, deferable := base.PrepareTestEnv(t, 0, new(ActionTask))
+ defer deferable()
+
+ assert.NoError(t, AddIndexToActionTaskStoppedLogExpired(x))
+}
diff --git a/models/migrations/v1_23/v303.go b/models/migrations/v1_23/v303.go
index adfe917d3f..dc541a9535 100644
--- a/models/migrations/v1_23/v303.go
+++ b/models/migrations/v1_23/v303.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"xorm.io/xorm"
@@ -19,5 +19,9 @@ func AddCommentMetaDataColumn(x *xorm.Engine) error {
CommentMetaData *CommentMetaData `xorm:"JSON TEXT"` // put all non-index metadata in a single field
}
- return x.Sync(new(Comment))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(Comment))
+ return err
}
diff --git a/models/migrations/v1_23/v304.go b/models/migrations/v1_23/v304.go
index 65cffedbd9..35d4d4881a 100644
--- a/models/migrations/v1_23/v304.go
+++ b/models/migrations/v1_23/v304.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import "xorm.io/xorm"
@@ -9,5 +9,8 @@ func AddIndexForReleaseSha1(x *xorm.Engine) error {
type Release struct {
Sha1 string `xorm:"INDEX VARCHAR(64)"`
}
- return x.Sync(new(Release))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreDropIndices: true,
+ }, new(Release))
+ return err
}
diff --git a/models/migrations/v1_23/v304_test.go b/models/migrations/v1_23/v304_test.go
new file mode 100644
index 0000000000..c3dfa5e7e7
--- /dev/null
+++ b/models/migrations/v1_23/v304_test.go
@@ -0,0 +1,40 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_23
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_AddIndexForReleaseSha1(t *testing.T) {
+ type Release struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX UNIQUE(n)"`
+ PublisherID int64 `xorm:"INDEX"`
+ TagName string `xorm:"INDEX UNIQUE(n)"`
+ OriginalAuthor string
+ OriginalAuthorID int64 `xorm:"index"`
+ LowerTagName string
+ Target string
+ Title string
+ Sha1 string `xorm:"VARCHAR(64)"`
+ NumCommits int64
+ Note string `xorm:"TEXT"`
+ IsDraft bool `xorm:"NOT NULL DEFAULT false"`
+ IsPrerelease bool `xorm:"NOT NULL DEFAULT false"`
+ IsTag bool `xorm:"NOT NULL DEFAULT false"` // will be true only if the record is a tag and has no related releases
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX"`
+ }
+
+ // Prepare and load the testing database
+ x, deferable := base.PrepareTestEnv(t, 0, new(Release))
+ defer deferable()
+
+ assert.NoError(t, AddIndexForReleaseSha1(x))
+}
diff --git a/models/migrations/v1_23/v305.go b/models/migrations/v1_23/v305.go
index 4d881192b2..3762279de1 100644
--- a/models/migrations/v1_23/v305.go
+++ b/models/migrations/v1_23/v305.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_23/v306.go b/models/migrations/v1_23/v306.go
index 276b438e95..c5c89dbeb8 100644
--- a/models/migrations/v1_23/v306.go
+++ b/models/migrations/v1_23/v306.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import "xorm.io/xorm"
@@ -9,5 +9,9 @@ func AddBlockAdminMergeOverrideBranchProtection(x *xorm.Engine) error {
type ProtectedBranch struct {
BlockAdminMergeOverride bool `xorm:"NOT NULL DEFAULT false"`
}
- return x.Sync(new(ProtectedBranch))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(ProtectedBranch))
+ return err
}
diff --git a/models/migrations/v1_23/v307.go b/models/migrations/v1_23/v307.go
index ef7f5f2c3f..54a69d250b 100644
--- a/models/migrations/v1_23/v307.go
+++ b/models/migrations/v1_23/v307.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_23/v308.go b/models/migrations/v1_23/v308.go
index 1e8a9b0af2..695fdfcc2d 100644
--- a/models/migrations/v1_23/v308.go
+++ b/models/migrations/v1_23/v308.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_23/v309.go b/models/migrations/v1_23/v309.go
index 5b39398443..e629b718a8 100644
--- a/models/migrations/v1_23/v309.go
+++ b/models/migrations/v1_23/v309.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_23/v310.go b/models/migrations/v1_23/v310.go
index 394417f5a0..074b1c54d3 100644
--- a/models/migrations/v1_23/v310.go
+++ b/models/migrations/v1_23/v310.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"xorm.io/xorm"
@@ -12,5 +12,9 @@ func AddPriorityToProtectedBranch(x *xorm.Engine) error {
Priority int64 `xorm:"NOT NULL DEFAULT 0"`
}
- return x.Sync(new(ProtectedBranch))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(ProtectedBranch))
+ return err
}
diff --git a/models/migrations/v1_23/v311.go b/models/migrations/v1_23/v311.go
index 0fc1ac8c0e..ef48085c79 100644
--- a/models/migrations/v1_23/v311.go
+++ b/models/migrations/v1_23/v311.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_23 //nolint
+package v1_23
import (
"xorm.io/xorm"
@@ -11,6 +11,9 @@ func AddTimeEstimateColumnToIssueTable(x *xorm.Engine) error {
type Issue struct {
TimeEstimate int64 `xorm:"NOT NULL DEFAULT 0"`
}
-
- return x.Sync(new(Issue))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(Issue))
+ return err
}
diff --git a/models/migrations/v1_24/v312.go b/models/migrations/v1_24/v312.go
index 9766dc1ccf..823b0eae40 100644
--- a/models/migrations/v1_24/v312.go
+++ b/models/migrations/v1_24/v312.go
@@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_24 //nolint
+package v1_24
import (
"xorm.io/xorm"
@@ -17,5 +17,9 @@ func (pullAutoMerge) TableName() string {
}
func AddDeleteBranchAfterMergeForAutoMerge(x *xorm.Engine) error {
- return x.Sync(new(pullAutoMerge))
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(pullAutoMerge))
+ return err
}
diff --git a/models/migrations/v1_24/v313.go b/models/migrations/v1_24/v313.go
index ee9d479340..7e6cda6bfd 100644
--- a/models/migrations/v1_24/v313.go
+++ b/models/migrations/v1_24/v313.go
@@ -1,7 +1,7 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_24 //nolint
+package v1_24
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_24/v314.go b/models/migrations/v1_24/v314.go
index e537be13b5..51cb2e34aa 100644
--- a/models/migrations/v1_24/v314.go
+++ b/models/migrations/v1_24/v314.go
@@ -1,7 +1,7 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_24 //nolint
+package v1_24
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_24/v315.go b/models/migrations/v1_24/v315.go
new file mode 100644
index 0000000000..52b9b44785
--- /dev/null
+++ b/models/migrations/v1_24/v315.go
@@ -0,0 +1,19 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "xorm.io/xorm"
+)
+
+func AddEphemeralToActionRunner(x *xorm.Engine) error {
+ type ActionRunner struct {
+ Ephemeral bool `xorm:"ephemeral NOT NULL DEFAULT false"`
+ }
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(ActionRunner))
+ return err
+}
diff --git a/models/migrations/v1_24/v316.go b/models/migrations/v1_24/v316.go
new file mode 100644
index 0000000000..14e888f9ee
--- /dev/null
+++ b/models/migrations/v1_24/v316.go
@@ -0,0 +1,24 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "xorm.io/xorm"
+)
+
+func AddDescriptionForSecretsAndVariables(x *xorm.Engine) error {
+ type Secret struct {
+ Description string `xorm:"TEXT"`
+ }
+
+ type ActionVariable struct {
+ Description string `xorm:"TEXT"`
+ }
+
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(Secret), new(ActionVariable))
+ return err
+}
diff --git a/models/migrations/v1_24/v317.go b/models/migrations/v1_24/v317.go
new file mode 100644
index 0000000000..a13db2dd27
--- /dev/null
+++ b/models/migrations/v1_24/v317.go
@@ -0,0 +1,56 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/xorm"
+ "xorm.io/xorm/schemas"
+)
+
+type improveActionTableIndicesAction struct {
+ ID int64 `xorm:"pk autoincr"`
+ UserID int64 `xorm:"INDEX"` // Receiver user id.
+ OpType int
+ ActUserID int64 // Action user id.
+ RepoID int64
+ CommentID int64 `xorm:"INDEX"`
+ IsDeleted bool `xorm:"NOT NULL DEFAULT false"`
+ RefName string
+ IsPrivate bool `xorm:"NOT NULL DEFAULT false"`
+ Content string `xorm:"TEXT"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+// TableName sets the name of this table
+func (*improveActionTableIndicesAction) TableName() string {
+ return "action"
+}
+
+// TableIndices implements xorm's TableIndices interface
+func (a *improveActionTableIndicesAction) TableIndices() []*schemas.Index {
+ repoIndex := schemas.NewIndex("r_u_d", schemas.IndexType)
+ repoIndex.AddColumn("repo_id", "user_id", "is_deleted")
+
+ actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
+ actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
+
+ cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
+ cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
+
+ cuIndex := schemas.NewIndex("c_u", schemas.IndexType)
+ cuIndex.AddColumn("user_id", "is_deleted")
+
+ actUserUserIndex := schemas.NewIndex("au_c_u", schemas.IndexType)
+ actUserUserIndex.AddColumn("act_user_id", "created_unix", "user_id")
+
+ indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex, actUserUserIndex}
+
+ return indices
+}
+
+func AddNewIndexForUserDashboard(x *xorm.Engine) error {
+ return x.Sync(new(improveActionTableIndicesAction))
+}
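A small hypothetical sketch of the TableIndices hook that AddNewIndexForUserDashboard relies on (assuming xorm consults this interface when the table is synced, which is what the migration above depends on): any table struct can declare its composite indices the same way.

package v1_24

import "xorm.io/xorm/schemas"

// exampleTable is a hypothetical table used only to illustrate the hook.
type exampleTable struct {
	ID        int64 `xorm:"pk autoincr"`
	RepoID    int64
	IsDeleted bool `xorm:"NOT NULL DEFAULT false"`
}

// TableName maps the struct to a concrete table name.
func (*exampleTable) TableName() string { return "example_table" }

// TableIndices declares the composite indices xorm should ensure for this table.
func (*exampleTable) TableIndices() []*schemas.Index {
	idx := schemas.NewIndex("r_d", schemas.IndexType)
	idx.AddColumn("repo_id", "is_deleted")
	return []*schemas.Index{idx}
}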
diff --git a/models/migrations/v1_24/v318.go b/models/migrations/v1_24/v318.go
new file mode 100644
index 0000000000..9b4a540960
--- /dev/null
+++ b/models/migrations/v1_24/v318.go
@@ -0,0 +1,21 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "code.gitea.io/gitea/models/perm"
+
+ "xorm.io/xorm"
+)
+
+func AddRepoUnitAnonymousAccessMode(x *xorm.Engine) error {
+ type RepoUnit struct { //revive:disable-line:exported
+ AnonymousAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"`
+ }
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(RepoUnit))
+ return err
+}
diff --git a/models/migrations/v1_24/v319.go b/models/migrations/v1_24/v319.go
new file mode 100644
index 0000000000..648081f74e
--- /dev/null
+++ b/models/migrations/v1_24/v319.go
@@ -0,0 +1,19 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "xorm.io/xorm"
+)
+
+func AddExclusiveOrderColumnToLabelTable(x *xorm.Engine) error {
+ type Label struct {
+ ExclusiveOrder int `xorm:"DEFAULT 0"`
+ }
+ _, err := x.SyncWithOptions(xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ }, new(Label))
+ return err
+}
diff --git a/models/migrations/v1_24/v320.go b/models/migrations/v1_24/v320.go
new file mode 100644
index 0000000000..ebef71939c
--- /dev/null
+++ b/models/migrations/v1_24/v320.go
@@ -0,0 +1,57 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_24
+
+import (
+ "code.gitea.io/gitea/modules/json"
+
+ "xorm.io/xorm"
+)
+
+func MigrateSkipTwoFactor(x *xorm.Engine) error {
+ type LoginSource struct {
+ TwoFactorPolicy string `xorm:"two_factor_policy NOT NULL DEFAULT ''"`
+ }
+ _, err := x.SyncWithOptions(
+ xorm.SyncOptions{
+ IgnoreConstrains: true,
+ IgnoreIndices: true,
+ },
+ new(LoginSource),
+ )
+ if err != nil {
+ return err
+ }
+
+ type LoginSourceSimple struct {
+ ID int64
+ Cfg string
+ }
+
+ var loginSources []LoginSourceSimple
+ err = x.Table("login_source").Find(&loginSources)
+ if err != nil {
+ return err
+ }
+
+ for _, source := range loginSources {
+ if source.Cfg == "" {
+ continue
+ }
+
+ var cfg map[string]any
+ err = json.Unmarshal([]byte(source.Cfg), &cfg)
+ if err != nil {
+ return err
+ }
+
+ if cfg["SkipLocalTwoFA"] == true {
+ _, err = x.Exec("UPDATE login_source SET two_factor_policy = 'skip' WHERE id = ?", source.ID)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
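To make the cfg handling in MigrateSkipTwoFactor concrete, here is a small self-contained illustration; the cfg payload is hypothetical, and while the migration uses Gitea's json wrapper, the standard library behaves the same for this check:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical login_source.cfg payload; real contents depend on the auth source type.
	cfgJSON := `{"Provider":"github","SkipLocalTwoFA":true}`
	var cfg map[string]any
	if err := json.Unmarshal([]byte(cfgJSON), &cfg); err != nil {
		panic(err)
	}
	// Mirrors the check in MigrateSkipTwoFactor: only an exact boolean true matches.
	fmt.Println(cfg["SkipLocalTwoFA"] == true) // true -> UPDATE ... SET two_factor_policy = 'skip'
}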
diff --git a/models/migrations/v1_25/main_test.go b/models/migrations/v1_25/main_test.go
new file mode 100644
index 0000000000..d2c4a4105d
--- /dev/null
+++ b/models/migrations/v1_25/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_25
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/migrations/base"
+)
+
+func TestMain(m *testing.M) {
+ base.MainTest(m)
+}
diff --git a/models/migrations/v1_25/v321.go b/models/migrations/v1_25/v321.go
new file mode 100644
index 0000000000..73ef180f48
--- /dev/null
+++ b/models/migrations/v1_25/v321.go
@@ -0,0 +1,52 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_25
+
+import (
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/setting"
+
+ "xorm.io/xorm"
+ "xorm.io/xorm/schemas"
+)
+
+func UseLongTextInSomeColumnsAndFixBugs(x *xorm.Engine) error {
+ if !setting.Database.Type.IsMySQL() {
+ return nil // Only MySQL needs to change from TEXT to LONGTEXT; for other databases the types are already the same
+ }
+
+ if err := base.ModifyColumn(x, "review_state", &schemas.Column{
+ Name: "updated_files",
+ SQLType: schemas.SQLType{
+ Name: "LONGTEXT",
+ },
+ Length: 0,
+ Nullable: false,
+ DefaultIsEmpty: true,
+ }); err != nil {
+ return err
+ }
+
+ if err := base.ModifyColumn(x, "package_property", &schemas.Column{
+ Name: "value",
+ SQLType: schemas.SQLType{
+ Name: "LONGTEXT",
+ },
+ Length: 0,
+ Nullable: false,
+ DefaultIsEmpty: true,
+ }); err != nil {
+ return err
+ }
+
+ return base.ModifyColumn(x, "notice", &schemas.Column{
+ Name: "description",
+ SQLType: schemas.SQLType{
+ Name: "LONGTEXT",
+ },
+ Length: 0,
+ Nullable: false,
+ DefaultIsEmpty: true,
+ })
+}
diff --git a/models/migrations/v1_25/v321_test.go b/models/migrations/v1_25/v321_test.go
new file mode 100644
index 0000000000..4897783fd3
--- /dev/null
+++ b/models/migrations/v1_25/v321_test.go
@@ -0,0 +1,70 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_25
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_UseLongTextInSomeColumnsAndFixBugs(t *testing.T) {
+ if !setting.Database.Type.IsMySQL() {
+ t.Skip("Only MySQL needs to change from TEXT to LONGTEXT")
+ }
+
+ type ReviewState struct {
+ ID int64 `xorm:"pk autoincr"`
+ UserID int64 `xorm:"NOT NULL UNIQUE(pull_commit_user)"`
+ PullID int64 `xorm:"NOT NULL INDEX UNIQUE(pull_commit_user) DEFAULT 0"` // Which PR was the review on?
+ CommitSHA string `xorm:"NOT NULL VARCHAR(64) UNIQUE(pull_commit_user)"` // Which commit was the head commit for the review?
+ UpdatedFiles map[string]int `xorm:"NOT NULL TEXT JSON"` // Stores for each of the changed files of a PR whether they have been viewed, changed since last viewed, or not viewed
+ UpdatedUnix timeutil.TimeStamp `xorm:"updated"` // Is an accurate indicator of the order of commits as we do not expect it to be possible to make reviews on previous commits
+ }
+
+ type PackageProperty struct {
+ ID int64 `xorm:"pk autoincr"`
+ RefType int `xorm:"INDEX NOT NULL"`
+ RefID int64 `xorm:"INDEX NOT NULL"`
+ Name string `xorm:"INDEX NOT NULL"`
+ Value string `xorm:"TEXT NOT NULL"`
+ }
+
+ type Notice struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type int
+ Description string `xorm:"LONGTEXT"`
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ }
+
+ // Prepare and load the testing database
+ x, deferable := base.PrepareTestEnv(t, 0, new(ReviewState), new(PackageProperty), new(Notice))
+ defer deferable()
+
+ assert.NoError(t, UseLongTextInSomeColumnsAndFixBugs(x))
+
+ tables, err := x.DBMetas()
+ assert.NoError(t, err)
+
+ for _, table := range tables {
+ switch table.Name {
+ case "review_state":
+ column := table.GetColumn("updated_files")
+ assert.NotNil(t, column)
+ assert.Equal(t, "LONGTEXT", column.SQLType.Name)
+ case "package_property":
+ column := table.GetColumn("value")
+ assert.NotNil(t, column)
+ assert.Equal(t, "LONGTEXT", column.SQLType.Name)
+ case "notice":
+ column := table.GetColumn("description")
+ assert.NotNil(t, column)
+ assert.Equal(t, "LONGTEXT", column.SQLType.Name)
+ }
+ }
+}
diff --git a/models/migrations/v1_6/v70.go b/models/migrations/v1_6/v70.go
index 74434a84a1..41f0966942 100644
--- a/models/migrations/v1_6/v70.go
+++ b/models/migrations/v1_6/v70.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_6 //nolint
+package v1_6
import (
"fmt"
diff --git a/models/migrations/v1_6/v71.go b/models/migrations/v1_6/v71.go
index 586187228b..2b11f57c92 100644
--- a/models/migrations/v1_6/v71.go
+++ b/models/migrations/v1_6/v71.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_6 //nolint
+package v1_6
import (
"fmt"
diff --git a/models/migrations/v1_6/v72.go b/models/migrations/v1_6/v72.go
index 04cef9a170..9fad88a1b6 100644
--- a/models/migrations/v1_6/v72.go
+++ b/models/migrations/v1_6/v72.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_6 //nolint
+package v1_6
import (
"fmt"
diff --git a/models/migrations/v1_7/v73.go b/models/migrations/v1_7/v73.go
index b5a748aae3..e0b7a28537 100644
--- a/models/migrations/v1_7/v73.go
+++ b/models/migrations/v1_7/v73.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_7 //nolint
+package v1_7
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_7/v74.go b/models/migrations/v1_7/v74.go
index f0567e3c9b..376be37a24 100644
--- a/models/migrations/v1_7/v74.go
+++ b/models/migrations/v1_7/v74.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_7 //nolint
+package v1_7
import "xorm.io/xorm"
diff --git a/models/migrations/v1_7/v75.go b/models/migrations/v1_7/v75.go
index fa7430970c..ef11575466 100644
--- a/models/migrations/v1_7/v75.go
+++ b/models/migrations/v1_7/v75.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_7 //nolint
+package v1_7
import (
"xorm.io/builder"
diff --git a/models/migrations/v1_8/v76.go b/models/migrations/v1_8/v76.go
index d3fbd94deb..81e9307549 100644
--- a/models/migrations/v1_8/v76.go
+++ b/models/migrations/v1_8/v76.go
@@ -1,7 +1,7 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import (
"fmt"
diff --git a/models/migrations/v1_8/v77.go b/models/migrations/v1_8/v77.go
index 8b19993924..4fe5ebe635 100644
--- a/models/migrations/v1_8/v77.go
+++ b/models/migrations/v1_8/v77.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_8/v78.go b/models/migrations/v1_8/v78.go
index 8f041c1484..e67f464131 100644
--- a/models/migrations/v1_8/v78.go
+++ b/models/migrations/v1_8/v78.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import (
"code.gitea.io/gitea/models/migrations/base"
diff --git a/models/migrations/v1_8/v79.go b/models/migrations/v1_8/v79.go
index eb3a9ed0f4..3f50114d5a 100644
--- a/models/migrations/v1_8/v79.go
+++ b/models/migrations/v1_8/v79.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import (
"code.gitea.io/gitea/modules/setting"
diff --git a/models/migrations/v1_8/v80.go b/models/migrations/v1_8/v80.go
index cebbbead28..6f9df47a93 100644
--- a/models/migrations/v1_8/v80.go
+++ b/models/migrations/v1_8/v80.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import "xorm.io/xorm"
diff --git a/models/migrations/v1_8/v81.go b/models/migrations/v1_8/v81.go
index a100dc1ef7..3c2acc6458 100644
--- a/models/migrations/v1_8/v81.go
+++ b/models/migrations/v1_8/v81.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_8 //nolint
+package v1_8
import (
"fmt"
diff --git a/models/migrations/v1_9/v82.go b/models/migrations/v1_9/v82.go
index 26806dd645..c685d3b86b 100644
--- a/models/migrations/v1_9/v82.go
+++ b/models/migrations/v1_9/v82.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"fmt"
diff --git a/models/migrations/v1_9/v83.go b/models/migrations/v1_9/v83.go
index 10e6c45875..a0cd57f7c5 100644
--- a/models/migrations/v1_9/v83.go
+++ b/models/migrations/v1_9/v83.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"code.gitea.io/gitea/modules/timeutil"
diff --git a/models/migrations/v1_9/v84.go b/models/migrations/v1_9/v84.go
index c7155fe9cf..423915ae57 100644
--- a/models/migrations/v1_9/v84.go
+++ b/models/migrations/v1_9/v84.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_9/v85.go b/models/migrations/v1_9/v85.go
index a23d7c5d6e..48e1cd5dc4 100644
--- a/models/migrations/v1_9/v85.go
+++ b/models/migrations/v1_9/v85.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"fmt"
diff --git a/models/migrations/v1_9/v86.go b/models/migrations/v1_9/v86.go
index cf2725d158..9464ff0cf6 100644
--- a/models/migrations/v1_9/v86.go
+++ b/models/migrations/v1_9/v86.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"xorm.io/xorm"
diff --git a/models/migrations/v1_9/v87.go b/models/migrations/v1_9/v87.go
index fa01b6e5e3..81a4ebf80d 100644
--- a/models/migrations/v1_9/v87.go
+++ b/models/migrations/v1_9/v87.go
@@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package v1_9 //nolint
+package v1_9
import (
"xorm.io/xorm"
diff --git a/models/organization/org.go b/models/organization/org.go
index 3e55a36758..5eba004d69 100644
--- a/models/organization/org.go
+++ b/models/organization/org.go
@@ -178,12 +178,6 @@ func (org *Organization) HomeLink() string {
return org.AsUser().HomeLink()
}
-// CanCreateRepo returns if user login can create a repository
-// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised
-func (org *Organization) CanCreateRepo() bool {
- return org.AsUser().CanCreateRepo()
-}
-
// FindOrgMembersOpts represents the conditions for finding org members
type FindOrgMembersOpts struct {
db.ListOptions
@@ -316,74 +310,69 @@ func CreateOrganization(ctx context.Context, org *Organization, owner *user_mode
org.NumMembers = 1
org.Type = user_model.UserTypeOrganization
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err = user_model.DeleteUserRedirect(ctx, org.Name); err != nil {
- return err
- }
-
- if err = db.Insert(ctx, org); err != nil {
- return fmt.Errorf("insert organization: %w", err)
- }
- if err = user_model.GenerateRandomAvatar(ctx, org.AsUser()); err != nil {
- return fmt.Errorf("generate random avatar: %w", err)
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = user_model.DeleteUserRedirect(ctx, org.Name); err != nil {
+ return err
+ }
- // Add initial creator to organization and owner team.
- if err = db.Insert(ctx, &OrgUser{
- UID: owner.ID,
- OrgID: org.ID,
- IsPublic: setting.Service.DefaultOrgMemberVisible,
- }); err != nil {
- return fmt.Errorf("insert org-user relation: %w", err)
- }
+ if err = db.Insert(ctx, org); err != nil {
+ return fmt.Errorf("insert organization: %w", err)
+ }
+ if err = user_model.GenerateRandomAvatar(ctx, org.AsUser()); err != nil {
+ return fmt.Errorf("generate random avatar: %w", err)
+ }
- // Create default owner team.
- t := &Team{
- OrgID: org.ID,
- LowerName: strings.ToLower(OwnerTeamName),
- Name: OwnerTeamName,
- AccessMode: perm.AccessModeOwner,
- NumMembers: 1,
- IncludesAllRepositories: true,
- CanCreateOrgRepo: true,
- }
- if err = db.Insert(ctx, t); err != nil {
- return fmt.Errorf("insert owner team: %w", err)
- }
+ // Add initial creator to organization and owner team.
+ if err = db.Insert(ctx, &OrgUser{
+ UID: owner.ID,
+ OrgID: org.ID,
+ IsPublic: setting.Service.DefaultOrgMemberVisible,
+ }); err != nil {
+ return fmt.Errorf("insert org-user relation: %w", err)
+ }
- // insert units for team
- units := make([]TeamUnit, 0, len(unit.AllRepoUnitTypes))
- for _, tp := range unit.AllRepoUnitTypes {
- up := perm.AccessModeOwner
- if tp == unit.TypeExternalTracker || tp == unit.TypeExternalWiki {
- up = perm.AccessModeRead
+ // Create default owner team.
+ t := &Team{
+ OrgID: org.ID,
+ LowerName: strings.ToLower(OwnerTeamName),
+ Name: OwnerTeamName,
+ AccessMode: perm.AccessModeOwner,
+ NumMembers: 1,
+ IncludesAllRepositories: true,
+ CanCreateOrgRepo: true,
+ }
+ if err = db.Insert(ctx, t); err != nil {
+ return fmt.Errorf("insert owner team: %w", err)
}
- units = append(units, TeamUnit{
- OrgID: org.ID,
- TeamID: t.ID,
- Type: tp,
- AccessMode: up,
- })
- }
- if err = db.Insert(ctx, &units); err != nil {
- return err
- }
+ // insert units for team
+ units := make([]TeamUnit, 0, len(unit.AllRepoUnitTypes))
+ for _, tp := range unit.AllRepoUnitTypes {
+ up := perm.AccessModeOwner
+ if tp == unit.TypeExternalTracker || tp == unit.TypeExternalWiki {
+ up = perm.AccessModeRead
+ }
+ units = append(units, TeamUnit{
+ OrgID: org.ID,
+ TeamID: t.ID,
+ Type: tp,
+ AccessMode: up,
+ })
+ }
- if err = db.Insert(ctx, &TeamUser{
- UID: owner.ID,
- OrgID: org.ID,
- TeamID: t.ID,
- }); err != nil {
- return fmt.Errorf("insert team-user relation: %w", err)
- }
+ if err = db.Insert(ctx, &units); err != nil {
+ return err
+ }
- return committer.Commit()
+ if err = db.Insert(ctx, &TeamUser{
+ UID: owner.ID,
+ OrgID: org.ID,
+ TeamID: t.ID,
+ }); err != nil {
+ return fmt.Errorf("insert team-user relation: %w", err)
+ }
+ return nil
+ })
}
// GetOrgByName returns organization by given name.
@@ -505,31 +494,26 @@ func AddOrgUser(ctx context.Context, orgID, uid int64) error {
return err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- // check in transaction
- isAlreadyMember, err = IsOrganizationMember(ctx, orgID, uid)
- if err != nil || isAlreadyMember {
- return err
- }
-
- ou := &OrgUser{
- UID: uid,
- OrgID: orgID,
- IsPublic: setting.Service.DefaultOrgMemberVisible,
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ // check in transaction
+ isAlreadyMember, err = IsOrganizationMember(ctx, orgID, uid)
+ if err != nil || isAlreadyMember {
+ return err
+ }
- if err := db.Insert(ctx, ou); err != nil {
- return err
- } else if _, err = db.Exec(ctx, "UPDATE `user` SET num_members = num_members + 1 WHERE id = ?", orgID); err != nil {
- return err
- }
+ ou := &OrgUser{
+ UID: uid,
+ OrgID: orgID,
+ IsPublic: setting.Service.DefaultOrgMemberVisible,
+ }
- return committer.Commit()
+ if err := db.Insert(ctx, ou); err != nil {
+ return err
+ } else if _, err = db.Exec(ctx, "UPDATE `user` SET num_members = num_members + 1 WHERE id = ?", orgID); err != nil {
+ return err
+ }
+ return nil
+ })
}
// GetOrgByID returns the user object by given ID if exists.
@@ -608,8 +592,3 @@ func getUserTeamIDsQueryBuilder(orgID, userID int64) *builder.Builder {
"team_user.uid": userID,
})
}
-
-// TeamsWithAccessToRepo returns all teams that have given access level to the repository.
-func (org *Organization) TeamsWithAccessToRepo(ctx context.Context, repoID int64, mode perm.AccessMode) ([]*Team, error) {
- return GetTeamsWithAccessToRepo(ctx, org.ID, repoID, mode)
-}
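Both rewrites in this file follow the same shape; a condensed, illustrative sketch of the AddOrgUser body above, assuming only the db helpers already used here (db.WithTx commits when the callback returns nil and rolls back on error):

package organization

import (
	"context"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/modules/setting"
)

// addOrgUserSketch is a hypothetical condensation of AddOrgUser for illustration.
func addOrgUserSketch(ctx context.Context, orgID, uid int64) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		if err := db.Insert(ctx, &OrgUser{UID: uid, OrgID: orgID, IsPublic: setting.Service.DefaultOrgMemberVisible}); err != nil {
			return err // any error aborts the callback and rolls the transaction back
		}
		_, err := db.Exec(ctx, "UPDATE `user` SET num_members = num_members + 1 WHERE id = ?", orgID)
		return err // returning nil lets db.WithTx commit
	})
}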
diff --git a/models/organization/org_list.go b/models/organization/org_list.go
index 78ac0e704a..81457191fe 100644
--- a/models/organization/org_list.go
+++ b/models/organization/org_list.go
@@ -50,8 +50,8 @@ type SearchOrganizationsOptions struct {
// FindOrgOptions finds orgs options
type FindOrgOptions struct {
db.ListOptions
- UserID int64
- IncludePrivate bool
+ UserID int64
+ IncludeVisibility structs.VisibleType
}
func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder {
@@ -65,11 +65,10 @@ func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder {
func (opts FindOrgOptions) ToConds() builder.Cond {
var cond builder.Cond = builder.Eq{"`user`.`type`": user_model.UserTypeOrganization}
if opts.UserID > 0 {
- cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludePrivate)))
- }
- if !opts.IncludePrivate {
- cond = cond.And(builder.Eq{"`user`.visibility": structs.VisibleTypePublic})
+ cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludeVisibility == structs.VisibleTypePrivate)))
}
+ // public=0, limited=1, private=2
+ cond = cond.And(builder.Lte{"`user`.visibility": opts.IncludeVisibility})
return cond
}
@@ -77,6 +76,16 @@ func (opts FindOrgOptions) ToOrders() string {
return "`user`.lower_name ASC"
}
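+// DoerViewOtherVisibility returns the most private visibility level of "other" that "doer" is allowed to see.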
+func DoerViewOtherVisibility(doer, other *user_model.User) structs.VisibleType {
+ if doer == nil || other == nil {
+ return structs.VisibleTypePublic
+ }
+ if doer.IsAdmin || doer.ID == other.ID {
+ return structs.VisibleTypePrivate
+ }
+ return structs.VisibleTypeLimited
+}
+
// GetOrgsCanCreateRepoByUserID returns a list of organizations in which the given user
// is allowed to create repos.
func GetOrgsCanCreateRepoByUserID(ctx context.Context, userID int64) ([]*Organization, error) {
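A hedged sketch of a typical call site for the two additions above; the function and variable names are illustrative, only FindOrgOptions and DoerViewOtherVisibility come from this patch:

package example

import (
	"context"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/organization"
	user_model "code.gitea.io/gitea/models/user"
)

// listVisibleOrgs lists the organizations of viewedUser that doer may see,
// using the visibility threshold helper added above.
func listVisibleOrgs(ctx context.Context, doer, viewedUser *user_model.User) ([]*organization.Organization, error) {
	return db.Find[organization.Organization](ctx, organization.FindOrgOptions{
		UserID:            viewedUser.ID,
		IncludeVisibility: organization.DoerViewOtherVisibility(doer, viewedUser),
	})
}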
diff --git a/models/organization/org_list_test.go b/models/organization/org_list_test.go
index 0f0f8a4bcd..a2a25c6f91 100644
--- a/models/organization/org_list_test.go
+++ b/models/organization/org_list_test.go
@@ -10,25 +10,32 @@ import (
"code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/structs"
"github.com/stretchr/testify/assert"
)
-func TestCountOrganizations(t *testing.T) {
+func TestOrgList(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
+ t.Run("CountOrganizations", testCountOrganizations)
+ t.Run("FindOrgs", testFindOrgs)
+ t.Run("GetUserOrgsList", testGetUserOrgsList)
+ t.Run("LoadOrgListTeams", testLoadOrgListTeams)
+ t.Run("DoerViewOtherVisibility", testDoerViewOtherVisibility)
+}
+
+func testCountOrganizations(t *testing.T) {
expected, err := db.GetEngine(db.DefaultContext).Where("type=?", user_model.UserTypeOrganization).Count(&organization.Organization{})
assert.NoError(t, err)
- cnt, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{IncludePrivate: true})
+ cnt, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{IncludeVisibility: structs.VisibleTypePrivate})
assert.NoError(t, err)
assert.Equal(t, expected, cnt)
}
-func TestFindOrgs(t *testing.T) {
- assert.NoError(t, unittest.PrepareTestDatabase())
-
+func testFindOrgs(t *testing.T) {
orgs, err := db.Find[organization.Organization](db.DefaultContext, organization.FindOrgOptions{
- UserID: 4,
- IncludePrivate: true,
+ UserID: 4,
+ IncludeVisibility: structs.VisibleTypePrivate,
})
assert.NoError(t, err)
if assert.Len(t, orgs, 1) {
@@ -36,33 +43,30 @@ func TestFindOrgs(t *testing.T) {
}
orgs, err = db.Find[organization.Organization](db.DefaultContext, organization.FindOrgOptions{
- UserID: 4,
- IncludePrivate: false,
+ UserID: 4,
})
assert.NoError(t, err)
assert.Empty(t, orgs)
total, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{
- UserID: 4,
- IncludePrivate: true,
+ UserID: 4,
+ IncludeVisibility: structs.VisibleTypePrivate,
})
assert.NoError(t, err)
assert.EqualValues(t, 1, total)
}
-func TestGetUserOrgsList(t *testing.T) {
- assert.NoError(t, unittest.PrepareTestDatabase())
+func testGetUserOrgsList(t *testing.T) {
orgs, err := organization.GetUserOrgsList(db.DefaultContext, &user_model.User{ID: 4})
assert.NoError(t, err)
if assert.Len(t, orgs, 1) {
assert.EqualValues(t, 3, orgs[0].ID)
// repo_id: 3 is in the team, 32 is public, 5 is private with no team
- assert.EqualValues(t, 2, orgs[0].NumRepos)
+ assert.Equal(t, 2, orgs[0].NumRepos)
}
}
-func TestLoadOrgListTeams(t *testing.T) {
- assert.NoError(t, unittest.PrepareTestDatabase())
+func testLoadOrgListTeams(t *testing.T) {
orgs, err := organization.GetUserOrgsList(db.DefaultContext, &user_model.User{ID: 4})
assert.NoError(t, err)
assert.Len(t, orgs, 1)
@@ -71,3 +75,10 @@ func TestLoadOrgListTeams(t *testing.T) {
assert.Len(t, teamsMap, 1)
assert.Len(t, teamsMap[3], 5)
}
+
+func testDoerViewOtherVisibility(t *testing.T) {
+ assert.Equal(t, structs.VisibleTypePublic, organization.DoerViewOtherVisibility(nil, nil))
+ assert.Equal(t, structs.VisibleTypeLimited, organization.DoerViewOtherVisibility(&user_model.User{ID: 1}, &user_model.User{ID: 2}))
+ assert.Equal(t, structs.VisibleTypePrivate, organization.DoerViewOtherVisibility(&user_model.User{ID: 1}, &user_model.User{ID: 1}))
+ assert.Equal(t, structs.VisibleTypePrivate, organization.DoerViewOtherVisibility(&user_model.User{ID: 1, IsAdmin: true}, &user_model.User{ID: 2}))
+}
diff --git a/models/organization/org_test.go b/models/organization/org_test.go
index 6638abd8e0..234325a8cd 100644
--- a/models/organization/org_test.go
+++ b/models/organization/org_test.go
@@ -135,7 +135,7 @@ func TestIsOrganizationOwner(t *testing.T) {
test := func(orgID, userID int64, expected bool) {
isOwner, err := organization.IsOrganizationOwner(db.DefaultContext, orgID, userID)
assert.NoError(t, err)
- assert.EqualValues(t, expected, isOwner)
+ assert.Equal(t, expected, isOwner)
}
test(3, 2, true)
test(3, 3, false)
@@ -149,7 +149,7 @@ func TestIsOrganizationMember(t *testing.T) {
test := func(orgID, userID int64, expected bool) {
isMember, err := organization.IsOrganizationMember(db.DefaultContext, orgID, userID)
assert.NoError(t, err)
- assert.EqualValues(t, expected, isMember)
+ assert.Equal(t, expected, isMember)
}
test(3, 2, true)
test(3, 3, false)
@@ -164,7 +164,7 @@ func TestIsPublicMembership(t *testing.T) {
test := func(orgID, userID int64, expected bool) {
isMember, err := organization.IsPublicMembership(db.DefaultContext, orgID, userID)
assert.NoError(t, err)
- assert.EqualValues(t, expected, isMember)
+ assert.Equal(t, expected, isMember)
}
test(3, 2, true)
test(3, 3, false)
@@ -237,7 +237,7 @@ func TestRestrictedUserOrgMembers(t *testing.T) {
memberUIDs = append(memberUIDs, member.UID)
}
slices.Sort(memberUIDs)
- assert.EqualValues(t, tc.expectedUIDs, memberUIDs)
+ assert.Equal(t, tc.expectedUIDs, memberUIDs)
})
}
}
@@ -255,7 +255,7 @@ func TestGetOrgUsersByOrgID(t *testing.T) {
sort.Slice(orgUsers, func(i, j int) bool {
return orgUsers[i].ID < orgUsers[j].ID
})
- assert.EqualValues(t, []*organization.OrgUser{{
+ assert.Equal(t, []*organization.OrgUser{{
ID: 1,
OrgID: 3,
UID: 2,
@@ -322,7 +322,7 @@ func TestAccessibleReposEnv_CountRepos(t *testing.T) {
assert.NoError(t, err)
count, err := env.CountRepos(db.DefaultContext)
assert.NoError(t, err)
- assert.EqualValues(t, expectedCount, count)
+ assert.Equal(t, expectedCount, count)
}
testSuccess(2, 3)
testSuccess(4, 2)
@@ -334,7 +334,7 @@ func TestAccessibleReposEnv_RepoIDs(t *testing.T) {
testSuccess := func(userID int64, expectedRepoIDs []int64) {
env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID)
assert.NoError(t, err)
- repoIDs, err := env.RepoIDs(db.DefaultContext, 1, 100)
+ repoIDs, err := env.RepoIDs(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, expectedRepoIDs, repoIDs)
}
@@ -342,25 +342,6 @@ func TestAccessibleReposEnv_RepoIDs(t *testing.T) {
testSuccess(4, []int64{3, 32})
}
-func TestAccessibleReposEnv_Repos(t *testing.T) {
- assert.NoError(t, unittest.PrepareTestDatabase())
- org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
- testSuccess := func(userID int64, expectedRepoIDs []int64) {
- env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID)
- assert.NoError(t, err)
- repos, err := env.Repos(db.DefaultContext, 1, 100)
- assert.NoError(t, err)
- expectedRepos := make(repo_model.RepositoryList, len(expectedRepoIDs))
- for i, repoID := range expectedRepoIDs {
- expectedRepos[i] = unittest.AssertExistsAndLoadBean(t,
- &repo_model.Repository{ID: repoID})
- }
- assert.Equal(t, expectedRepos, repos)
- }
- testSuccess(2, []int64{3, 5, 32})
- testSuccess(4, []int64{3, 32})
-}
-
func TestAccessibleReposEnv_MirrorRepos(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
diff --git a/models/organization/org_user.go b/models/organization/org_user.go
index 08d936d922..4d7527c15f 100644
--- a/models/organization/org_user.go
+++ b/models/organization/org_user.go
@@ -78,7 +78,7 @@ func IsOrganizationAdmin(ctx context.Context, orgID, uid int64) (bool, error) {
return false, err
}
for _, t := range teams {
- if t.AccessMode >= perm.AccessModeAdmin {
+ if t.HasAdminAccess() {
return true, nil
}
}
diff --git a/models/organization/org_user_test.go b/models/organization/org_user_test.go
index c5110b2a34..689544430d 100644
--- a/models/organization/org_user_test.go
+++ b/models/organization/org_user_test.go
@@ -139,7 +139,7 @@ func TestAddOrgUser(t *testing.T) {
unittest.AssertExistsAndLoadBean(t, ou)
assert.Equal(t, isPublic, ou.IsPublic)
org = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID})
- assert.EqualValues(t, expectedNumMembers, org.NumMembers)
+ assert.Equal(t, expectedNumMembers, org.NumMembers)
}
setting.Service.DefaultOrgMemberVisible = false
diff --git a/models/organization/team.go b/models/organization/team.go
index 96666da39a..7f3a9b3829 100644
--- a/models/organization/team.go
+++ b/models/organization/team.go
@@ -113,7 +113,7 @@ func (t *Team) LoadUnits(ctx context.Context) (err error) {
// GetUnitNames returns the team's unit names
func (t *Team) GetUnitNames() (res []string) {
- if t.AccessMode >= perm.AccessModeAdmin {
+ if t.HasAdminAccess() {
return unit.AllUnitKeyNames()
}
@@ -126,7 +126,7 @@ func (t *Team) GetUnitNames() (res []string) {
// GetUnitsMap returns the team units permissions
func (t *Team) GetUnitsMap() map[string]string {
m := make(map[string]string)
- if t.AccessMode >= perm.AccessModeAdmin {
+ if t.HasAdminAccess() {
for _, u := range unit.Units {
m[u.NameKey] = t.AccessMode.ToString()
}
@@ -153,6 +153,10 @@ func (t *Team) IsMember(ctx context.Context, userID int64) bool {
return isMember
}
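+// HasAdminAccess returns true if the team's access mode is admin or owner.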
+func (t *Team) HasAdminAccess() bool {
+ return t.AccessMode >= perm.AccessModeAdmin
+}
+
// LoadMembers returns paginated members in team of organization.
func (t *Team) LoadMembers(ctx context.Context) (err error) {
t.Members, err = GetTeamMembers(ctx, &SearchMembersOptions{
@@ -238,22 +242,6 @@ func GetTeamByID(ctx context.Context, teamID int64) (*Team, error) {
return t, nil
}
-// GetTeamNamesByID returns team's lower name from a list of team ids.
-func GetTeamNamesByID(ctx context.Context, teamIDs []int64) ([]string, error) {
- if len(teamIDs) == 0 {
- return []string{}, nil
- }
-
- var teamNames []string
- err := db.GetEngine(ctx).Table("team").
- Select("lower_name").
- In("id", teamIDs).
- Asc("name").
- Find(&teamNames)
-
- return teamNames, err
-}
-
// IncrTeamRepoNum increases the number of repos for the given team by 1
func IncrTeamRepoNum(ctx context.Context, teamID int64) error {
_, err := db.GetEngine(ctx).Incr("num_repos").ID(teamID).Update(new(Team))
diff --git a/models/organization/team_repo.go b/models/organization/team_repo.go
index 53edd203a8..b3e266dbc7 100644
--- a/models/organization/team_repo.go
+++ b/models/organization/team_repo.go
@@ -9,6 +9,8 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/perm"
"code.gitea.io/gitea/models/unit"
+
+ "xorm.io/builder"
)
// TeamRepo represents a team-repository relation.
@@ -48,26 +50,27 @@ func RemoveTeamRepo(ctx context.Context, teamID, repoID int64) error {
return err
}
-// GetTeamsWithAccessToRepo returns all teams in an organization that have given access level to the repository.
-func GetTeamsWithAccessToRepo(ctx context.Context, orgID, repoID int64, mode perm.AccessMode) ([]*Team, error) {
+// GetTeamsWithAccessToAnyRepoUnit returns all teams in an organization that have the given access level to any of the given repository units.
+// This function is only used to find teams that can serve as a branch protection allowlist or as reviewers; it isn't really used for access control.
+// FIXME: TEAM-UNIT-PERMISSION this logic is not complete, search the fixme keyword to see more details
+func GetTeamsWithAccessToAnyRepoUnit(ctx context.Context, orgID, repoID int64, mode perm.AccessMode, unitType unit.Type, unitTypesMore ...unit.Type) ([]*Team, error) {
teams := make([]*Team, 0, 5)
- return teams, db.GetEngine(ctx).Where("team.authorize >= ?", mode).
- Join("INNER", "team_repo", "team_repo.team_id = team.id").
- And("team_repo.org_id = ?", orgID).
- And("team_repo.repo_id = ?", repoID).
- OrderBy("name").
- Find(&teams)
-}
-// GetTeamsWithAccessToRepoUnit returns all teams in an organization that have given access level to the repository special unit.
-func GetTeamsWithAccessToRepoUnit(ctx context.Context, orgID, repoID int64, mode perm.AccessMode, unitType unit.Type) ([]*Team, error) {
- teams := make([]*Team, 0, 5)
- return teams, db.GetEngine(ctx).Where("team_unit.access_mode >= ?", mode).
+ sub := builder.Select("team_id").From("team_unit").
+ Where(builder.Expr("team_unit.team_id = team.id")).
+ And(builder.In("team_unit.type", append([]unit.Type{unitType}, unitTypesMore...))).
+ And(builder.Expr("team_unit.access_mode >= ?", mode))
+
+ err := db.GetEngine(ctx).
Join("INNER", "team_repo", "team_repo.team_id = team.id").
- Join("INNER", "team_unit", "team_unit.team_id = team.id").
And("team_repo.org_id = ?", orgID).
And("team_repo.repo_id = ?", repoID).
- And("team_unit.type = ?", unitType).
+ And(builder.Or(
+ builder.Expr("team.authorize >= ?", mode),
+ builder.In("team.id", sub),
+ )).
OrderBy("name").
Find(&teams)
+
+ return teams, err
}
diff --git a/models/organization/team_repo_test.go b/models/organization/team_repo_test.go
index c0d6750df9..73a06a93c2 100644
--- a/models/organization/team_repo_test.go
+++ b/models/organization/team_repo_test.go
@@ -22,7 +22,7 @@ func TestGetTeamsWithAccessToRepoUnit(t *testing.T) {
org41 := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 41})
repo61 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 61})
- teams, err := organization.GetTeamsWithAccessToRepoUnit(db.DefaultContext, org41.ID, repo61.ID, perm.AccessModeRead, unit.TypePullRequests)
+ teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(db.DefaultContext, org41.ID, repo61.ID, perm.AccessModeRead, unit.TypePullRequests)
assert.NoError(t, err)
if assert.Len(t, teams, 2) {
assert.EqualValues(t, 21, teams[0].ID)
diff --git a/models/organization/team_test.go b/models/organization/team_test.go
index deaabbfa2c..b0bf842584 100644
--- a/models/organization/team_test.go
+++ b/models/organization/team_test.go
@@ -77,7 +77,7 @@ func TestGetTeam(t *testing.T) {
testSuccess := func(orgID int64, name string) {
team, err := organization.GetTeam(db.DefaultContext, orgID, name)
assert.NoError(t, err)
- assert.EqualValues(t, orgID, team.OrgID)
+ assert.Equal(t, orgID, team.OrgID)
assert.Equal(t, name, team.Name)
}
testSuccess(3, "Owners")
@@ -95,7 +95,7 @@ func TestGetTeamByID(t *testing.T) {
testSuccess := func(teamID int64) {
team, err := organization.GetTeamByID(db.DefaultContext, teamID)
assert.NoError(t, err)
- assert.EqualValues(t, teamID, team.ID)
+ assert.Equal(t, teamID, team.ID)
}
testSuccess(1)
testSuccess(2)
@@ -163,7 +163,7 @@ func TestGetUserOrgTeams(t *testing.T) {
teams, err := organization.GetUserOrgTeams(db.DefaultContext, orgID, userID)
assert.NoError(t, err)
for _, team := range teams {
- assert.EqualValues(t, orgID, team.OrgID)
+ assert.Equal(t, orgID, team.OrgID)
unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{TeamID: team.ID, UID: userID})
}
}
diff --git a/models/organization/team_unit.go b/models/organization/team_unit.go
index 3087b70770..c6ec6b39b2 100644
--- a/models/organization/team_unit.go
+++ b/models/organization/team_unit.go
@@ -31,21 +31,16 @@ func getUnitsByTeamID(ctx context.Context, teamID int64) (units []*TeamUnit, err
// UpdateTeamUnits updates a team's units
func UpdateTeamUnits(ctx context.Context, team *Team, units []TeamUnit) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if _, err = db.GetEngine(ctx).Where("team_id = ?", team.ID).Delete(new(TeamUnit)); err != nil {
- return err
- }
-
- if len(units) > 0 {
- if err = db.Insert(ctx, units); err != nil {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if _, err = db.GetEngine(ctx).Where("team_id = ?", team.ID).Delete(new(TeamUnit)); err != nil {
return err
}
- }
- return committer.Commit()
+ if len(units) > 0 {
+ if err = db.Insert(ctx, units); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
}
diff --git a/models/packages/container/const.go b/models/packages/container/const.go
deleted file mode 100644
index 0dfbda051d..0000000000
--- a/models/packages/container/const.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package container
-
-const (
- ManifestFilename = "manifest.json"
- UploadVersion = "_upload"
-)
diff --git a/models/packages/container/search.go b/models/packages/container/search.go
index 5df35117ce..9321d9eb41 100644
--- a/models/packages/container/search.go
+++ b/models/packages/container/search.go
@@ -25,6 +25,7 @@ type BlobSearchOptions struct {
Digest string
Tag string
IsManifest bool
+ OnlyLead bool
Repository string
}
@@ -43,7 +44,10 @@ func (opts *BlobSearchOptions) toConds() builder.Cond {
cond = cond.And(builder.Eq{"package_version.lower_version": strings.ToLower(opts.Tag)})
}
if opts.IsManifest {
- cond = cond.And(builder.Eq{"package_file.lower_name": ManifestFilename})
+ cond = cond.And(builder.Eq{"package_file.lower_name": container_module.ManifestFilename})
+ }
+ if opts.OnlyLead {
+ cond = cond.And(builder.Eq{"package_file.is_lead": true})
}
if opts.Digest != "" {
var propsCond builder.Cond = builder.Eq{
@@ -73,11 +77,9 @@ func GetContainerBlob(ctx context.Context, opts *BlobSearchOptions) (*packages.P
pfds, err := getContainerBlobsLimit(ctx, opts, 1)
if err != nil {
return nil, err
- }
- if len(pfds) != 1 {
+ } else if len(pfds) == 0 {
return nil, ErrContainerBlobNotExist
}
-
return pfds[0], nil
}
@@ -233,7 +235,7 @@ func SearchImageTags(ctx context.Context, opts *ImageTagsSearchOptions) ([]*pack
func SearchExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) ([]*packages.PackageFile, error) {
var cond builder.Cond = builder.Eq{
"package_version.is_internal": true,
- "package_version.lower_version": UploadVersion,
+ "package_version.lower_version": container_module.UploadVersion,
"package.type": packages.TypeContainer,
}
cond = cond.And(builder.Lt{"package_file.created_unix": time.Now().Add(-olderThan).Unix()})
diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go
index d251fcc4a9..2d43dc3046 100644
--- a/models/packages/descriptor.go
+++ b/models/packages/descriptor.go
@@ -11,6 +11,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/packages/alpine"
"code.gitea.io/gitea/modules/packages/arch"
@@ -102,22 +103,26 @@ func (pd *PackageDescriptor) CalculateBlobSize() int64 {
// GetPackageDescriptor gets the package description for a version
func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDescriptor, error) {
- p, err := GetPackageByID(ctx, pv.PackageID)
+ return GetPackageDescriptorWithCache(ctx, pv, cache.NewEphemeralCache())
+}
+
+func GetPackageDescriptorWithCache(ctx context.Context, pv *PackageVersion, c *cache.EphemeralCache) (*PackageDescriptor, error) {
+ p, err := cache.GetWithEphemeralCache(ctx, c, "package", pv.PackageID, GetPackageByID)
if err != nil {
return nil, err
}
- o, err := user_model.GetUserByID(ctx, p.OwnerID)
+ o, err := cache.GetWithEphemeralCache(ctx, c, "user", p.OwnerID, user_model.GetUserByID)
if err != nil {
return nil, err
}
var repository *repo_model.Repository
if p.RepoID > 0 {
- repository, err = repo_model.GetRepositoryByID(ctx, p.RepoID)
+ repository, err = cache.GetWithEphemeralCache(ctx, c, "repo", p.RepoID, repo_model.GetRepositoryByID)
if err != nil && !repo_model.IsErrRepoNotExist(err) {
return nil, err
}
}
- creator, err := user_model.GetUserByID(ctx, pv.CreatorID)
+ creator, err := cache.GetWithEphemeralCache(ctx, c, "user", pv.CreatorID, user_model.GetUserByID)
if err != nil {
if errors.Is(err, util.ErrNotExist) {
creator = user_model.NewGhostUser()
@@ -145,9 +150,13 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
return nil, err
}
- pfds, err := GetPackageFileDescriptors(ctx, pfs)
- if err != nil {
- return nil, err
+ pfds := make([]*PackageFileDescriptor, 0, len(pfs))
+ for _, pf := range pfs {
+ pfd, err := getPackageFileDescriptor(ctx, pf, c)
+ if err != nil {
+ return nil, err
+ }
+ pfds = append(pfds, pfd)
}
var metadata any
@@ -197,7 +206,7 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
case TypeVagrant:
metadata = &vagrant.Metadata{}
default:
- panic(fmt.Sprintf("unknown package type: %s", string(p.Type)))
+ panic("unknown package type: " + string(p.Type))
}
if metadata != nil {
if err := json.Unmarshal([]byte(pv.MetadataJSON), &metadata); err != nil {
@@ -221,7 +230,11 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
// GetPackageFileDescriptor gets a package file descriptor for a package file
func GetPackageFileDescriptor(ctx context.Context, pf *PackageFile) (*PackageFileDescriptor, error) {
- pb, err := GetBlobByID(ctx, pf.BlobID)
+ return getPackageFileDescriptor(ctx, pf, cache.NewEphemeralCache())
+}
+
+func getPackageFileDescriptor(ctx context.Context, pf *PackageFile, c *cache.EphemeralCache) (*PackageFileDescriptor, error) {
+ pb, err := cache.GetWithEphemeralCache(ctx, c, "package_file_blob", pf.BlobID, GetBlobByID)
if err != nil {
return nil, err
}
@@ -251,9 +264,13 @@ func GetPackageFileDescriptors(ctx context.Context, pfs []*PackageFile) ([]*Pack
// GetPackageDescriptors gets the package descriptions for the versions
func GetPackageDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*PackageDescriptor, error) {
+ return getPackageDescriptors(ctx, pvs, cache.NewEphemeralCache())
+}
+
+func getPackageDescriptors(ctx context.Context, pvs []*PackageVersion, c *cache.EphemeralCache) ([]*PackageDescriptor, error) {
pds := make([]*PackageDescriptor, 0, len(pvs))
for _, pv := range pvs {
- pd, err := GetPackageDescriptor(ctx, pv)
+ pd, err := GetPackageDescriptorWithCache(ctx, pv, c)
if err != nil {
return nil, err
}
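
The hunk above threads a single ephemeral cache through every descriptor lookup, so the owner, repository, creator and blob rows are loaded once per request instead of once per version. A minimal caller-side sketch, assuming only the helpers shown in this diff (listDescriptors is a hypothetical name):

package packages

import (
	"context"

	"code.gitea.io/gitea/modules/cache"
)

// listDescriptors resolves descriptors for many versions while sharing one
// ephemeral cache, mirroring what the unexported getPackageDescriptors does.
func listDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*PackageDescriptor, error) {
	c := cache.NewEphemeralCache() // package, user, repo and blob lookups are memoized here
	pds := make([]*PackageDescriptor, 0, len(pvs))
	for _, pv := range pvs {
		pd, err := GetPackageDescriptorWithCache(ctx, pv, c)
		if err != nil {
			return nil, err
		}
		pds = append(pds, pd)
	}
	return pds, nil
}
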
diff --git a/models/packages/nuget/search.go b/models/packages/nuget/search.go
index 7a505ff08f..a4b23f31d5 100644
--- a/models/packages/nuget/search.go
+++ b/models/packages/nuget/search.go
@@ -33,7 +33,7 @@ func SearchVersions(ctx context.Context, opts *packages_model.PackageSearchOptio
Where(cond).
OrderBy("package.name ASC")
if opts.Paginator != nil {
- skip, take := opts.GetSkipTake()
+ skip, take := opts.Paginator.GetSkipTake()
inner = inner.Limit(take, skip)
}
diff --git a/models/packages/package.go b/models/packages/package.go
index 8935dbaa1c..38d1cdcf66 100644
--- a/models/packages/package.go
+++ b/models/packages/package.go
@@ -127,7 +127,7 @@ func (pt Type) Name() string {
case TypeVagrant:
return "Vagrant"
}
- panic(fmt.Sprintf("unknown package type: %s", string(pt)))
+ panic("unknown package type: " + string(pt))
}
// SVGName gets the name of the package type svg image
@@ -178,7 +178,7 @@ func (pt Type) SVGName() string {
case TypeVagrant:
return "gitea-vagrant"
}
- panic(fmt.Sprintf("unknown package type: %s", string(pt)))
+ panic("unknown package type: " + string(pt))
}
// Package represents a package
diff --git a/models/packages/package_file.go b/models/packages/package_file.go
index 270cb32fdf..bf877485d6 100644
--- a/models/packages/package_file.go
+++ b/models/packages/package_file.go
@@ -115,6 +115,11 @@ func DeleteFileByID(ctx context.Context, fileID int64) error {
return err
}
+func UpdateFile(ctx context.Context, pf *PackageFile, cols []string) error {
+ _, err := db.GetEngine(ctx).ID(pf.ID).Cols(cols...).Update(pf)
+ return err
+}
+
// PackageFileSearchOptions are options for SearchXXX methods
type PackageFileSearchOptions struct {
OwnerID int64
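
UpdateFile above persists only the listed columns of an existing package file row. A hedged usage sketch, assuming the model keeps its existing Name/LowerName pair (renamePackageFile is a hypothetical helper):

package packages

import (
	"context"
	"strings"
)

// renamePackageFile updates the display name and writes only the two affected
// columns, leaving every other column of the row untouched.
func renamePackageFile(ctx context.Context, pf *PackageFile, newName string) error {
	pf.Name = newName
	pf.LowerName = strings.ToLower(newName)
	return UpdateFile(ctx, pf, []string{"name", "lower_name"})
}
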
diff --git a/models/packages/package_property.go b/models/packages/package_property.go
index e0170016cf..acc05d8d5a 100644
--- a/models/packages/package_property.go
+++ b/models/packages/package_property.go
@@ -32,7 +32,7 @@ type PackageProperty struct {
RefType PropertyType `xorm:"INDEX NOT NULL"`
RefID int64 `xorm:"INDEX NOT NULL"`
Name string `xorm:"INDEX NOT NULL"`
- Value string `xorm:"TEXT NOT NULL"`
+ Value string `xorm:"LONGTEXT NOT NULL"`
}
// InsertProperty creates a property
@@ -66,6 +66,20 @@ func UpdateProperty(ctx context.Context, pp *PackageProperty) error {
return err
}
+func InsertOrUpdateProperty(ctx context.Context, refType PropertyType, refID int64, name, value string) error {
+ pp := PackageProperty{RefType: refType, RefID: refID, Name: name}
+ ok, err := db.GetEngine(ctx).Get(&pp)
+ if err != nil {
+ return err
+ }
+ if ok {
+ _, err = db.GetEngine(ctx).Where("ref_type=? AND ref_id=? AND name=?", refType, refID, name).Cols("value").Update(&PackageProperty{Value: value})
+ return err
+ }
+ _, err = InsertProperty(ctx, refType, refID, name, value)
+ return err
+}
+
// DeleteAllProperties deletes all properties of a ref
func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error {
_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{})
@@ -78,8 +92,8 @@ func DeletePropertyByID(ctx context.Context, propertyID int64) error {
return err
}
-// DeletePropertyByName deletes properties by name
-func DeletePropertyByName(ctx context.Context, refType PropertyType, refID int64, name string) error {
+// DeletePropertiesByName deletes properties by name
+func DeletePropertiesByName(ctx context.Context, refType PropertyType, refID int64, name string) error {
_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Delete(&PackageProperty{})
return err
}
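
InsertOrUpdateProperty gives callers a simple upsert: when a row with the same ref type, ref ID and name exists, only its value column is rewritten; otherwise a new property is inserted. A short sketch, assuming the existing PropertyTypeVersion constant (the property name is illustrative):

package packages

import "context"

// setVersionProperty stores or refreshes a single named property on a package version.
func setVersionProperty(ctx context.Context, versionID int64, value string) error {
	return InsertOrUpdateProperty(ctx, PropertyTypeVersion, versionID, "example.marker", value)
}

Note that the update path rewrites every row matching ref_type/ref_id/name, so the helper is intended for properties that occur at most once per ref.
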
diff --git a/models/packages/package_version.go b/models/packages/package_version.go
index 278e8e3a86..0a478c0323 100644
--- a/models/packages/package_version.go
+++ b/models/packages/package_version.go
@@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
+ "xorm.io/xorm"
)
// ErrDuplicatePackageVersion indicates a duplicated package version error
@@ -36,6 +37,14 @@ type PackageVersion struct {
DownloadCount int64 `xorm:"NOT NULL DEFAULT 0"`
}
+// IsPrerelease checks if the version is a prerelease version according to semantic versioning
+func (pv *PackageVersion) IsPrerelease() bool {
+ if pv == nil || pv.Version == "" {
+ return false
+ }
+ return strings.Contains(pv.Version, "-")
+}
+
// GetOrInsertVersion inserts a version. If the same version already exists, ErrDuplicatePackageVersion is returned
func GetOrInsertVersion(ctx context.Context, pv *PackageVersion) (*PackageVersion, error) {
e := db.GetEngine(ctx)
@@ -187,7 +196,7 @@ type PackageSearchOptions struct {
HasFileWithName string // only results are found which are associated with a file with the specific name
HasFiles optional.Option[bool] // only results are found which have associated files
Sort VersionSort
- db.Paginator
+ Paginator db.Paginator
}
func (opts *PackageSearchOptions) ToConds() builder.Cond {
@@ -279,9 +288,19 @@ func (opts *PackageSearchOptions) configureOrderBy(e db.Engine) {
default:
e.Desc("package_version.created_unix")
}
+ e.Desc("package_version.id") // Sort by id for stable order with duplicates in the other field
+}
- // Sort by id for stable order with duplicates in the other field
- e.Asc("package_version.id")
+func searchVersionsBySession(sess *xorm.Session, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) {
+ opts.configureOrderBy(sess)
+ pvs := make([]*PackageVersion, 0, 10)
+ if opts.Paginator != nil {
+ sess = db.SetSessionPagination(sess, opts.Paginator)
+ count, err := sess.FindAndCount(&pvs)
+ return pvs, count, err
+ }
+ err := sess.Find(&pvs)
+ return pvs, int64(len(pvs)), err
}
// SearchVersions gets all versions of packages matching the search options
@@ -291,16 +310,7 @@ func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*Package
Table("package_version").
Join("INNER", "package", "package.id = package_version.package_id").
Where(opts.ToConds())
-
- opts.configureOrderBy(sess)
-
- if opts.Paginator != nil {
- sess = db.SetSessionPagination(sess, opts)
- }
-
- pvs := make([]*PackageVersion, 0, 10)
- count, err := sess.FindAndCount(&pvs)
- return pvs, count, err
+ return searchVersionsBySession(sess, opts)
}
// SearchLatestVersions gets the latest version of every package matching the search options
@@ -318,15 +328,7 @@ func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*P
Join("INNER", "package", "package.id = package_version.package_id").
Where(builder.In("package_version.id", in))
- opts.configureOrderBy(sess)
-
- if opts.Paginator != nil {
- sess = db.SetSessionPagination(sess, opts)
- }
-
- pvs := make([]*PackageVersion, 0, 10)
- count, err := sess.FindAndCount(&pvs)
- return pvs, count, err
+ return searchVersionsBySession(sess, opts)
}
// ExistVersion checks if a version matching the search options exists
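
With Paginator now an explicit field instead of an embedded interface, callers opt in to paging, and an unpaged search simply leaves it nil (searchVersionsBySession then skips SetSessionPagination and uses the result length as the count). A minimal sketch, assuming db.ListOptions as the paginator and omitting the filter fields:

package packages

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

// firstVersionPage fetches one page of versions plus the total match count.
func firstVersionPage(ctx context.Context) ([]*PackageVersion, int64, error) {
	opts := &PackageSearchOptions{
		Paginator: &db.ListOptions{Page: 1, PageSize: 20},
	}
	return SearchVersions(ctx, opts)
}
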
diff --git a/models/perm/access/repo_permission.go b/models/perm/access/repo_permission.go
index 5e7ecb31ea..7de43ecd07 100644
--- a/models/perm/access/repo_permission.go
+++ b/models/perm/access/repo_permission.go
@@ -15,6 +15,7 @@ import (
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
@@ -25,7 +26,8 @@ type Permission struct {
units []*repo_model.RepoUnit
unitsMode map[unit.Type]perm_model.AccessMode
- everyoneAccessMode map[unit.Type]perm_model.AccessMode
+ everyoneAccessMode map[unit.Type]perm_model.AccessMode // the unit's minimal access mode for every signed-in user
+ anonymousAccessMode map[unit.Type]perm_model.AccessMode // the unit's minimal access mode for anonymous (non-signed-in) user
}
// IsOwner returns true if current user is the owner of repository.
@@ -39,7 +41,8 @@ func (p *Permission) IsAdmin() bool {
}
// HasAnyUnitAccess returns true if the user might have at least one access mode to any unit of this repository.
-// It doesn't count the "everyone access mode".
+// It doesn't count the "public(anonymous/everyone) access mode".
+// TODO: most calls to this function should be replaced with `HasAnyUnitAccessOrPublicAccess`
func (p *Permission) HasAnyUnitAccess() bool {
for _, v := range p.unitsMode {
if v >= perm_model.AccessModeRead {
@@ -49,13 +52,22 @@ func (p *Permission) HasAnyUnitAccess() bool {
return p.AccessMode >= perm_model.AccessModeRead
}
-func (p *Permission) HasAnyUnitAccessOrEveryoneAccess() bool {
+func (p *Permission) HasAnyUnitPublicAccess() bool {
+ for _, v := range p.anonymousAccessMode {
+ if v >= perm_model.AccessModeRead {
+ return true
+ }
+ }
for _, v := range p.everyoneAccessMode {
if v >= perm_model.AccessModeRead {
return true
}
}
- return p.HasAnyUnitAccess()
+ return false
+}
+
+func (p *Permission) HasAnyUnitAccessOrPublicAccess() bool {
+ return p.HasAnyUnitPublicAccess() || p.HasAnyUnitAccess()
}
// HasUnits returns true if the permission contains attached units
@@ -73,14 +85,16 @@ func (p *Permission) GetFirstUnitRepoID() int64 {
}
// UnitAccessMode returns the current user's access mode to the specified unit of the repository
-// It also considers "everyone access mode"
+// It also considers "public (anonymous/everyone) access mode"
func (p *Permission) UnitAccessMode(unitType unit.Type) perm_model.AccessMode {
// if the units map contains the access mode, use it, but admin/owner mode could override it
if m, ok := p.unitsMode[unitType]; ok {
return util.Iif(p.AccessMode >= perm_model.AccessModeAdmin, p.AccessMode, m)
}
// if the units map does not contain the access mode, return the default access mode if the unit exists
- unitDefaultAccessMode := max(p.AccessMode, p.everyoneAccessMode[unitType])
+ unitDefaultAccessMode := p.AccessMode
+ unitDefaultAccessMode = max(unitDefaultAccessMode, p.anonymousAccessMode[unitType])
+ unitDefaultAccessMode = max(unitDefaultAccessMode, p.everyoneAccessMode[unitType])
hasUnit := slices.ContainsFunc(p.units, func(u *repo_model.RepoUnit) bool { return u.Type == unitType })
return util.Iif(hasUnit, unitDefaultAccessMode, perm_model.AccessModeNone)
}
@@ -171,27 +185,41 @@ func (p *Permission) LogString() string {
format += "\n\tunitsMode[%-v]: %-v"
args = append(args, key.LogString(), value.LogString())
}
+ format += "\n\tanonymousAccessMode: %-v"
+ args = append(args, p.anonymousAccessMode)
format += "\n\teveryoneAccessMode: %-v"
args = append(args, p.everyoneAccessMode)
format += "\n\t]>"
return fmt.Sprintf(format, args...)
}
+func applyPublicAccessPermission(unitType unit.Type, accessMode perm_model.AccessMode, modeMap *map[unit.Type]perm_model.AccessMode) {
+ if setting.Repository.ForcePrivate {
+ return
+ }
+ if accessMode >= perm_model.AccessModeRead && accessMode > (*modeMap)[unitType] {
+ if *modeMap == nil {
+ *modeMap = make(map[unit.Type]perm_model.AccessMode)
+ }
+ (*modeMap)[unitType] = accessMode
+ }
+}
+
func finalProcessRepoUnitPermission(user *user_model.User, perm *Permission) {
+ // apply public (anonymous) access permissions
+ for _, u := range perm.units {
+ applyPublicAccessPermission(u.Type, u.AnonymousAccessMode, &perm.anonymousAccessMode)
+ }
+
if user == nil || user.ID <= 0 {
// for anonymous access, it could be:
// AccessMode is None or Read, units holds the repo units, unitsMode is nil
return
}
- // apply everyone access permissions
+ // apply public (everyone) access permissions
for _, u := range perm.units {
- if u.EveryoneAccessMode >= perm_model.AccessModeRead && u.EveryoneAccessMode > perm.everyoneAccessMode[u.Type] {
- if perm.everyoneAccessMode == nil {
- perm.everyoneAccessMode = make(map[unit.Type]perm_model.AccessMode)
- }
- perm.everyoneAccessMode[u.Type] = u.EveryoneAccessMode
- }
+ applyPublicAccessPermission(u.Type, u.EveryoneAccessMode, &perm.everyoneAccessMode)
}
if perm.unitsMode == nil {
@@ -209,6 +237,11 @@ func finalProcessRepoUnitPermission(user *user_model.User, perm *Permission) {
break
}
}
+ for t := range perm.anonymousAccessMode {
+ if shouldKeep = shouldKeep || u.Type == t; shouldKeep {
+ break
+ }
+ }
for t := range perm.everyoneAccessMode {
if shouldKeep = shouldKeep || u.Type == t; shouldKeep {
break
@@ -235,7 +268,6 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
perm.units = repo.Units
// anonymous user visits a private repo.
- // TODO: anonymous user visit public unit of private repo???
if user == nil && repo.IsPrivate {
perm.AccessMode = perm_model.AccessModeNone
return perm, nil
@@ -254,7 +286,8 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
}
// Prevent strangers from checking out public repos of private organizations/users
- // Allow user if they are collaborator of a repo within a private user or a private organization but not a member of the organization itself
+ // Allow user if they are a collaborator of a repo within a private user or a private organization but not a member of the organization itself
+ // TODO: rename it to "IsOwnerVisibleToDoer"
if !organization.HasOrgOrUserVisible(ctx, repo.Owner, user) && !isCollaborator {
perm.AccessMode = perm_model.AccessModeNone
return perm, nil
@@ -272,7 +305,7 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
return perm, nil
}
- // plain user
+ // plain user TODO: this check should be replaced, only need to check collaborator access mode
perm.AccessMode, err = accessLevel(ctx, user, repo)
if err != nil {
return perm, err
@@ -282,6 +315,19 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
return perm, nil
}
+ // now: the owner is visible to the doer; if the repo is public, the minimum access mode is read
+ minAccessMode := util.Iif(!repo.IsPrivate && !user.IsRestricted, perm_model.AccessModeRead, perm_model.AccessModeNone)
+ perm.AccessMode = max(perm.AccessMode, minAccessMode)
+
+ // get units mode from teams
+ teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID)
+ if err != nil {
+ return perm, err
+ }
+ if len(teams) == 0 {
+ return perm, nil
+ }
+
perm.unitsMode = make(map[unit.Type]perm_model.AccessMode)
// Collaborators on organization
@@ -291,15 +337,9 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
}
}
- // get units mode from teams
- teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID)
- if err != nil {
- return perm, err
- }
-
// if user in an owner team
for _, team := range teams {
- if team.AccessMode >= perm_model.AccessModeAdmin {
+ if team.HasAdminAccess() {
perm.AccessMode = perm_model.AccessModeOwner
perm.unitsMode = nil
return perm, nil
@@ -307,19 +347,12 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
}
for _, u := range repo.Units {
- var found bool
for _, team := range teams {
+ unitAccessMode := minAccessMode
if teamMode, exist := team.UnitAccessModeEx(ctx, u.Type); exist {
- perm.unitsMode[u.Type] = max(perm.unitsMode[u.Type], teamMode)
- found = true
- }
- }
-
- // for a public repo on an organization, a non-restricted user has read permission on non-team defined units.
- if !found && !repo.IsPrivate && !user.IsRestricted {
- if _, ok := perm.unitsMode[u.Type]; !ok {
- perm.unitsMode[u.Type] = perm_model.AccessModeRead
+ unitAccessMode = max(perm.unitsMode[u.Type], unitAccessMode, teamMode)
}
+ perm.unitsMode[u.Type] = unitAccessMode
}
}
@@ -367,7 +400,7 @@ func IsUserRepoAdmin(ctx context.Context, repo *repo_model.Repository, user *use
}
for _, team := range teams {
- if team.AccessMode >= perm_model.AccessModeAdmin {
+ if team.HasAdminAccess() {
return true, nil
}
}
@@ -376,13 +409,13 @@ func IsUserRepoAdmin(ctx context.Context, repo *repo_model.Repository, user *use
// AccessLevel returns the Access a user has to a repository. Will return NoneAccess if the
// user does not have access.
-func AccessLevel(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (perm_model.AccessMode, error) { //nolint
+func AccessLevel(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (perm_model.AccessMode, error) { //nolint:revive // export stutter
return AccessLevelUnit(ctx, user, repo, unit.TypeCode)
}
// AccessLevelUnit returns the Access a user has to a repository's unit. Will return NoneAccess if the
// user does not have access.
-func AccessLevelUnit(ctx context.Context, user *user_model.User, repo *repo_model.Repository, unitType unit.Type) (perm_model.AccessMode, error) { //nolint
+func AccessLevelUnit(ctx context.Context, user *user_model.User, repo *repo_model.Repository, unitType unit.Type) (perm_model.AccessMode, error) { //nolint:revive // export stutter
perm, err := GetUserRepoPermission(ctx, repo, user)
if err != nil {
return perm_model.AccessModeNone, err
@@ -490,3 +523,7 @@ func CheckRepoUnitUser(ctx context.Context, repo *repo_model.Repository, user *u
return perm.CanRead(unitType)
}
+
+func PermissionNoAccess() Permission {
+ return Permission{AccessMode: perm_model.AccessModeNone}
+}
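
The renamed helpers separate truly anonymous access from the signed-in "everyone" access, and UnitAccessMode now folds both maps in. A caller-side sketch under those assumptions (canReadIssues is a hypothetical helper; doer may be nil for anonymous visitors):

package access

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/unit"
	user_model "code.gitea.io/gitea/models/user"
)

// canReadIssues reports whether doer (possibly nil) may read the issues unit,
// honouring team modes as well as the anonymous/everyone unit access modes.
func canReadIssues(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) (bool, error) {
	perm, err := GetUserRepoPermission(ctx, repo, doer)
	if err != nil {
		return false, err
	}
	if !perm.HasAnyUnitAccessOrPublicAccess() {
		return false, nil
	}
	return perm.CanRead(unit.TypeIssues), nil
}
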
diff --git a/models/perm/access/repo_permission_test.go b/models/perm/access/repo_permission_test.go
index 9862da0673..c8675b1ded 100644
--- a/models/perm/access/repo_permission_test.go
+++ b/models/perm/access/repo_permission_test.go
@@ -6,12 +6,16 @@ package access
import (
"testing"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
perm_model "code.gitea.io/gitea/models/perm"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestHasAnyUnitAccess(t *testing.T) {
@@ -22,14 +26,21 @@ func TestHasAnyUnitAccess(t *testing.T) {
units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}},
}
assert.False(t, perm.HasAnyUnitAccess())
- assert.False(t, perm.HasAnyUnitAccessOrEveryoneAccess())
+ assert.False(t, perm.HasAnyUnitAccessOrPublicAccess())
perm = Permission{
units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}},
everyoneAccessMode: map[unit.Type]perm_model.AccessMode{unit.TypeIssues: perm_model.AccessModeRead},
}
assert.False(t, perm.HasAnyUnitAccess())
- assert.True(t, perm.HasAnyUnitAccessOrEveryoneAccess())
+ assert.True(t, perm.HasAnyUnitAccessOrPublicAccess())
+
+ perm = Permission{
+ units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}},
+ anonymousAccessMode: map[unit.Type]perm_model.AccessMode{unit.TypeIssues: perm_model.AccessModeRead},
+ }
+ assert.False(t, perm.HasAnyUnitAccess())
+ assert.True(t, perm.HasAnyUnitAccessOrPublicAccess())
perm = Permission{
AccessMode: perm_model.AccessModeRead,
@@ -43,7 +54,7 @@ func TestHasAnyUnitAccess(t *testing.T) {
assert.True(t, perm.HasAnyUnitAccess())
}
-func TestApplyEveryoneRepoPermission(t *testing.T) {
+func TestApplyPublicAccessRepoPermission(t *testing.T) {
perm := Permission{
AccessMode: perm_model.AccessModeNone,
units: []*repo_model.RepoUnit{
@@ -56,6 +67,15 @@ func TestApplyEveryoneRepoPermission(t *testing.T) {
perm = Permission{
AccessMode: perm_model.AccessModeNone,
units: []*repo_model.RepoUnit{
+ {Type: unit.TypeWiki, AnonymousAccessMode: perm_model.AccessModeRead},
+ },
+ }
+ finalProcessRepoUnitPermission(nil, &perm)
+ assert.True(t, perm.CanRead(unit.TypeWiki))
+
+ perm = Permission{
+ AccessMode: perm_model.AccessModeNone,
+ units: []*repo_model.RepoUnit{
{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
},
}
@@ -136,3 +156,45 @@ func TestUnitAccessMode(t *testing.T) {
}
assert.Equal(t, perm_model.AccessModeRead, perm.UnitAccessMode(unit.TypeWiki), "has unit, and map, use map")
}
+
+func TestGetUserRepoPermission(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ ctx := t.Context()
+ repo32 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 32}) // org public repo
+ require.NoError(t, repo32.LoadOwner(ctx))
+ require.True(t, repo32.Owner.IsOrganization())
+
+ require.NoError(t, db.TruncateBeans(ctx, &organization.Team{}, &organization.TeamUser{}, &organization.TeamRepo{}, &organization.TeamUnit{}))
+ org := repo32.Owner
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ team := &organization.Team{OrgID: org.ID, LowerName: "test_team"}
+ require.NoError(t, db.Insert(ctx, team))
+
+ t.Run("DoerInTeamWithNoRepo", func(t *testing.T) {
+ require.NoError(t, db.Insert(ctx, &organization.TeamUser{OrgID: org.ID, TeamID: team.ID, UID: user.ID}))
+ perm, err := GetUserRepoPermission(ctx, repo32, user)
+ require.NoError(t, err)
+ assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode)
+ assert.Nil(t, perm.unitsMode) // doer in the team, but has no access to the repo
+ })
+
+ require.NoError(t, db.Insert(ctx, &organization.TeamRepo{OrgID: org.ID, TeamID: team.ID, RepoID: repo32.ID}))
+ require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeNone}))
+ t.Run("DoerWithTeamUnitAccessNone", func(t *testing.T) {
+ perm, err := GetUserRepoPermission(ctx, repo32, user)
+ require.NoError(t, err)
+ assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode)
+ assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeCode])
+ assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
+ })
+
+ require.NoError(t, db.TruncateBeans(ctx, &organization.TeamUnit{}))
+ require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeWrite}))
+ t.Run("DoerWithTeamUnitAccessWrite", func(t *testing.T) {
+ perm, err := GetUserRepoPermission(ctx, repo32, user)
+ require.NoError(t, err)
+ assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode)
+ assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode])
+ assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
+ })
+}
diff --git a/models/project/column.go b/models/project/column.go
index 77ff5ef83e..9b9d874997 100644
--- a/models/project/column.go
+++ b/models/project/column.go
@@ -147,7 +147,7 @@ func NewColumn(ctx context.Context, column *Column) error {
return err
}
if res.ColumnCount >= maxProjectColumns {
- return fmt.Errorf("NewBoard: maximum number of columns reached")
+ return errors.New("NewBoard: maximum number of columns reached")
}
column.Sorting = int8(util.Iif(res.ColumnCount > 0, res.MaxSorting+1, 0))
_, err := db.GetEngine(ctx).Insert(column)
@@ -172,7 +172,7 @@ func deleteColumnByID(ctx context.Context, columnID int64) error {
}
if column.Default {
- return fmt.Errorf("deleteColumnByID: cannot delete default column")
+ return errors.New("deleteColumnByID: cannot delete default column")
}
// move all issues to the default column
diff --git a/models/project/column_test.go b/models/project/column_test.go
index 66db23a3e4..6a615090a5 100644
--- a/models/project/column_test.go
+++ b/models/project/column_test.go
@@ -97,9 +97,9 @@ func Test_MoveColumnsOnProject(t *testing.T) {
columnsAfter, err := project1.GetColumns(db.DefaultContext)
assert.NoError(t, err)
assert.Len(t, columnsAfter, 3)
- assert.EqualValues(t, columns[1].ID, columnsAfter[0].ID)
- assert.EqualValues(t, columns[2].ID, columnsAfter[1].ID)
- assert.EqualValues(t, columns[0].ID, columnsAfter[2].ID)
+ assert.Equal(t, columns[1].ID, columnsAfter[0].ID)
+ assert.Equal(t, columns[2].ID, columnsAfter[1].ID)
+ assert.Equal(t, columns[0].ID, columnsAfter[2].ID)
}
func Test_NewColumn(t *testing.T) {
@@ -110,7 +110,7 @@ func Test_NewColumn(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, columns, 3)
- for i := 0; i < maxProjectColumns-3; i++ {
+ for i := range maxProjectColumns - 3 {
err := NewColumn(db.DefaultContext, &Column{
Title: fmt.Sprintf("column-%d", i+4),
ProjectID: project1.ID,
diff --git a/models/project/issue.go b/models/project/issue.go
index 98eed2a213..47d1537ec7 100644
--- a/models/project/issue.go
+++ b/models/project/issue.go
@@ -5,7 +5,7 @@ package project
import (
"context"
- "fmt"
+ "errors"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/util"
@@ -35,7 +35,7 @@ func deleteProjectIssuesByProjectID(ctx context.Context, projectID int64) error
func (c *Column) moveIssuesToAnotherColumn(ctx context.Context, newColumn *Column) error {
if c.ProjectID != newColumn.ProjectID {
- return fmt.Errorf("columns have to be in the same project")
+ return errors.New("columns have to be in the same project")
}
if c.ID == newColumn.ID {
diff --git a/models/project/project.go b/models/project/project.go
index d27e053094..c003664fa3 100644
--- a/models/project/project.go
+++ b/models/project/project.go
@@ -129,11 +129,11 @@ func (p *Project) LoadRepo(ctx context.Context) (err error) {
return err
}
-func ProjectLinkForOrg(org *user_model.User, projectID int64) string { //nolint
+func ProjectLinkForOrg(org *user_model.User, projectID int64) string { //nolint:revive // export stutter
return fmt.Sprintf("%s/-/projects/%d", org.HomeLink(), projectID)
}
-func ProjectLinkForRepo(repo *repo_model.Repository, projectID int64) string { //nolint
+func ProjectLinkForRepo(repo *repo_model.Repository, projectID int64) string { //nolint:revive // export stutter
return fmt.Sprintf("%s/projects/%d", repo.Link(), projectID)
}
@@ -359,41 +359,25 @@ func updateRepositoryProjectCount(ctx context.Context, repoID int64) error {
// ChangeProjectStatusByRepoIDAndID toggles a project between opened and closed
func ChangeProjectStatusByRepoIDAndID(ctx context.Context, repoID, projectID int64, isClosed bool) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- p := new(Project)
-
- has, err := db.GetEngine(ctx).ID(projectID).Where("repo_id = ?", repoID).Get(p)
- if err != nil {
- return err
- } else if !has {
- return ErrProjectNotExist{ID: projectID, RepoID: repoID}
- }
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ p := new(Project)
- if err := changeProjectStatus(ctx, p, isClosed); err != nil {
- return err
- }
+ has, err := db.GetEngine(ctx).ID(projectID).Where("repo_id = ?", repoID).Get(p)
+ if err != nil {
+ return err
+ } else if !has {
+ return ErrProjectNotExist{ID: projectID, RepoID: repoID}
+ }
- return committer.Commit()
+ return changeProjectStatus(ctx, p, isClosed)
+ })
}
// ChangeProjectStatus toggles a project between opened and closed
func ChangeProjectStatus(ctx context.Context, p *Project, isClosed bool) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err := changeProjectStatus(ctx, p, isClosed); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ return changeProjectStatus(ctx, p, isClosed)
+ })
}
func changeProjectStatus(ctx context.Context, p *Project, isClosed bool) error {
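
Both project status changers above move from the manual TxContext/committer dance to db.WithTx, which reuses an outer transaction when one is already on the context, commits when the callback returns nil, and rolls back otherwise. A generic sketch of the pattern (the SQL is a placeholder):

package project

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

// exampleTx shows the shape of the refactor: nothing to Close, no explicit
// Commit; returning an error rolls the whole transaction back.
func exampleTx(ctx context.Context) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		_, err := db.Exec(ctx, "UPDATE project SET is_closed = is_closed WHERE id = ?", 0)
		return err
	})
}
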
diff --git a/models/project/project_test.go b/models/project/project_test.go
index dd421b4659..c2e924e8ae 100644
--- a/models/project/project_test.go
+++ b/models/project/project_test.go
@@ -113,10 +113,10 @@ func TestProjectsSort(t *testing.T) {
OrderBy: GetSearchOrderByBySortType(tt.sortType),
})
assert.NoError(t, err)
- assert.EqualValues(t, int64(6), count)
+ assert.Equal(t, int64(6), count)
if assert.Len(t, projects, 6) {
for i := range projects {
- assert.EqualValues(t, tt.wants[i], projects[i].ID)
+ assert.Equal(t, tt.wants[i], projects[i].ID)
}
}
}
diff --git a/models/pull/automerge.go b/models/pull/automerge.go
index 3cafacc3a4..7f940a9849 100644
--- a/models/pull/automerge.go
+++ b/models/pull/automerge.go
@@ -5,12 +5,14 @@ package pull
import (
"context"
+ "errors"
"fmt"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
)
// AutoMerge represents a pull request scheduled for merging when checks succeed
@@ -76,7 +78,10 @@ func GetScheduledMergeByPullID(ctx context.Context, pullID int64) (bool, *AutoMe
return false, nil, err
}
- doer, err := user_model.GetUserByID(ctx, scheduledPRM.DoerID)
+ doer, err := user_model.GetPossibleUserByID(ctx, scheduledPRM.DoerID)
+ if errors.Is(err, util.ErrNotExist) {
+ doer, err = user_model.NewGhostUser(), nil
+ }
if err != nil {
return false, nil, err
}
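
GetScheduledMergeByPullID now degrades gracefully when the scheduling user has been deleted, substituting the ghost user instead of failing the lookup. The pattern generalizes to any stored doer ID; a hedged sketch (loadDoerOrGhost is hypothetical):

package pull

import (
	"context"
	"errors"

	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/util"
)

// loadDoerOrGhost resolves a stored user ID and falls back to the ghost user
// when the account no longer exists, instead of propagating ErrNotExist.
func loadDoerOrGhost(ctx context.Context, doerID int64) (*user_model.User, error) {
	doer, err := user_model.GetPossibleUserByID(ctx, doerID)
	if errors.Is(err, util.ErrNotExist) {
		return user_model.NewGhostUser(), nil
	}
	return doer, err
}
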
diff --git a/models/pull/review_state.go b/models/pull/review_state.go
index e46a22a49d..137af00eab 100644
--- a/models/pull/review_state.go
+++ b/models/pull/review_state.go
@@ -6,6 +6,7 @@ package pull
import (
"context"
"fmt"
+ "maps"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
@@ -100,9 +101,7 @@ func mergeFiles(oldFiles, newFiles map[string]ViewedState) map[string]ViewedStat
return oldFiles
}
- for file, viewed := range newFiles {
- oldFiles[file] = viewed
- }
+ maps.Copy(oldFiles, newFiles)
return oldFiles
}
diff --git a/models/renderhelper/commit_checker.go b/models/renderhelper/commit_checker.go
index 4815643e67..407e45fb54 100644
--- a/models/renderhelper/commit_checker.go
+++ b/models/renderhelper/commit_checker.go
@@ -47,7 +47,7 @@ func (c *commitChecker) IsCommitIDExisting(commitID string) bool {
c.gitRepo, c.gitRepoCloser = r, closer
}
- exist = c.gitRepo.IsReferenceExist(commitID) // Don't use IsObjectExist since it doesn't support short hashs with gogit edition.
+ exist = c.gitRepo.IsReferenceExist(commitID) // Don't use IsObjectExist since it doesn't support short hashes with gogit edition.
c.commitCache[commitID] = exist
return exist
}
diff --git a/models/renderhelper/repo_comment.go b/models/renderhelper/repo_comment.go
index 6bd5e91ad1..ae0fbf0abd 100644
--- a/models/renderhelper/repo_comment.go
+++ b/models/renderhelper/repo_comment.go
@@ -28,14 +28,14 @@ func (r *RepoComment) IsCommitIDExisting(commitID string) bool {
return r.commitChecker.IsCommitIDExisting(commitID)
}
-func (r *RepoComment) ResolveLink(link string, likeType markup.LinkType) (finalLink string) {
- switch likeType {
- case markup.LinkTypeApp:
- finalLink = r.ctx.ResolveLinkApp(link)
+func (r *RepoComment) ResolveLink(link, preferLinkType string) string {
+ linkType, link := markup.ParseRenderedLink(link, preferLinkType)
+ switch linkType {
+ case markup.LinkTypeRoot:
+ return r.ctx.ResolveLinkRoot(link)
default:
- finalLink = r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link)
+ return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link)
}
- return finalLink
}
var _ markup.RenderHelper = (*RepoComment)(nil)
@@ -44,30 +44,31 @@ type RepoCommentOptions struct {
DeprecatedRepoName string // it is only a patch for the non-standard "markup" api
DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api
CurrentRefPath string // eg: "branch/main" or "commit/11223344"
+ FootnoteContextID string // the extra context ID for footnotes, used to avoid conflicts with other footnotes in the same page
}
func NewRenderContextRepoComment(ctx context.Context, repo *repo_model.Repository, opts ...RepoCommentOptions) *markup.RenderContext {
- helper := &RepoComment{
- repoLink: repo.Link(),
- opts: util.OptionalArg(opts),
- }
+ helper := &RepoComment{opts: util.OptionalArg(opts)}
rctx := markup.NewRenderContext(ctx)
helper.ctx = rctx
+ var metas map[string]string
if repo != nil {
helper.repoLink = repo.Link()
helper.commitChecker = newCommitChecker(ctx, repo)
- rctx = rctx.WithMetas(repo.ComposeMetas(ctx))
+ metas = repo.ComposeCommentMetas(ctx)
} else {
- // this is almost dead code, only to pass the incorrect tests
- helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName)
- rctx = rctx.WithMetas(map[string]string{
- "user": helper.opts.DeprecatedOwnerName,
- "repo": helper.opts.DeprecatedRepoName,
-
- "markdownLineBreakStyle": "comment",
- "markupAllowShortIssuePattern": "true",
- })
+ // repo can be nil when rendering a commit message in a user's dashboard feed for a repository that has been deleted
+ metas = map[string]string{}
+ if helper.opts.DeprecatedOwnerName != "" {
+ // this is almost dead code, only to pass the incorrect tests
+ helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName)
+ metas["user"] = helper.opts.DeprecatedOwnerName
+ metas["repo"] = helper.opts.DeprecatedRepoName
+ }
+ metas["markdownNewLineHardBreak"] = "true"
+ metas["markupAllowShortIssuePattern"] = "true"
}
- rctx = rctx.WithHelper(helper)
+ metas["footnoteContextId"] = helper.opts.FootnoteContextID
+ rctx = rctx.WithMetas(metas).WithHelper(helper)
return rctx
}
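
NewRenderContextRepoComment now assembles its metas in one place, tolerates a nil repository, and carries the new FootnoteContextID so several comments rendered on one page do not collide on footnote anchors. A usage sketch, assuming the markup/markdown helpers already used by the tests below (the ID value is arbitrary):

package renderhelper

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/markup/markdown"
)

// renderComment renders one comment body with a per-comment footnote namespace.
func renderComment(ctx context.Context, repo *repo_model.Repository, body, commentID string) (string, error) {
	rctx := NewRenderContextRepoComment(ctx, repo, RepoCommentOptions{
		FootnoteContextID: commentID, // e.g. "comment-1001", unique per rendered comment
	}).WithMarkupType(markdown.MarkupName)
	return markup.RenderString(rctx, body)
}
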
diff --git a/models/renderhelper/repo_comment_test.go b/models/renderhelper/repo_comment_test.go
index 776152db96..3b13bff73c 100644
--- a/models/renderhelper/repo_comment_test.go
+++ b/models/renderhelper/repo_comment_test.go
@@ -72,4 +72,11 @@ func TestRepoComment(t *testing.T) {
<a href="/user2/repo1/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/commit/1234/image" alt="./image"/></a></p>
`, rendered)
})
+
+ t.Run("NoRepo", func(t *testing.T) {
+ rctx := NewRenderContextRepoComment(t.Context(), nil).WithMarkupType(markdown.MarkupName)
+ rendered, err := markup.RenderString(rctx, "any")
+ assert.NoError(t, err)
+ assert.Equal(t, "<p>any</p>\n", rendered)
+ })
}
diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go
index 794828c617..e0375ed280 100644
--- a/models/renderhelper/repo_file.go
+++ b/models/renderhelper/repo_file.go
@@ -29,17 +29,17 @@ func (r *RepoFile) IsCommitIDExisting(commitID string) bool {
return r.commitChecker.IsCommitIDExisting(commitID)
}
-func (r *RepoFile) ResolveLink(link string, likeType markup.LinkType) string {
- finalLink := link
- switch likeType {
- case markup.LinkTypeApp:
- finalLink = r.ctx.ResolveLinkApp(link)
- case markup.LinkTypeDefault:
- finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
+func (r *RepoFile) ResolveLink(link, preferLinkType string) (finalLink string) {
+ linkType, link := markup.ParseRenderedLink(link, preferLinkType)
+ switch linkType {
+ case markup.LinkTypeRoot:
+ finalLink = r.ctx.ResolveLinkRoot(link)
case markup.LinkTypeRaw:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
case markup.LinkTypeMedia:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
+ default:
+ finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
}
return finalLink
}
@@ -61,15 +61,13 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository,
if repo != nil {
helper.repoLink = repo.Link()
helper.commitChecker = newCommitChecker(ctx, repo)
- rctx = rctx.WithMetas(repo.ComposeDocumentMetas(ctx))
+ rctx = rctx.WithMetas(repo.ComposeRepoFileMetas(ctx))
} else {
// this is almost dead code, only to pass the incorrect tests
helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName)
rctx = rctx.WithMetas(map[string]string{
"user": helper.opts.DeprecatedOwnerName,
"repo": helper.opts.DeprecatedRepoName,
-
- "markdownLineBreakStyle": "document",
})
}
rctx = rctx.WithHelper(helper)
diff --git a/models/renderhelper/repo_file_test.go b/models/renderhelper/repo_file_test.go
index 29cb45f6f7..3b48efba3a 100644
--- a/models/renderhelper/repo_file_test.go
+++ b/models/renderhelper/repo_file_test.go
@@ -48,8 +48,8 @@ func TestRepoFile(t *testing.T) {
assert.Equal(t,
`<p><a href="/user2/repo1/src/branch/main/test" rel="nofollow">/test</a>
<a href="/user2/repo1/src/branch/main/test" rel="nofollow">./test</a>
-<a href="/user2/repo1/media/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="/image"/></a>
-<a href="/user2/repo1/media/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="./image"/></a></p>
+<a href="/user2/repo1/src/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="/image"/></a>
+<a href="/user2/repo1/src/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="./image"/></a></p>
`, rendered)
})
@@ -62,7 +62,7 @@ func TestRepoFile(t *testing.T) {
`)
assert.NoError(t, err)
assert.Equal(t, `<p><a href="/user2/repo1/src/commit/1234/test" rel="nofollow">/test</a>
-<a href="/user2/repo1/media/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/image" alt="/image"/></a></p>
+<a href="/user2/repo1/src/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/image" alt="/image"/></a></p>
`, rendered)
})
@@ -77,7 +77,7 @@ func TestRepoFile(t *testing.T) {
<video src="LINK">
`)
assert.NoError(t, err)
- assert.Equal(t, `<a href="/user2/repo1/media/commit/1234/my-dir/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/my-dir/LINK"/></a>
+ assert.Equal(t, `<a href="/user2/repo1/src/commit/1234/my-dir/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/my-dir/LINK"/></a>
<video src="/user2/repo1/media/commit/1234/my-dir/LINK">
</video>`, rendered)
})
@@ -100,7 +100,7 @@ func TestRepoFileOrgMode(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, `<p>
<a href="https://google.com/" rel="nofollow">https://google.com/</a>
-<a href="/user2/repo1/media/commit/1234/my-dir/ImageLink.svg" rel="nofollow">The Image Desc</a></p>
+<a href="/user2/repo1/src/commit/1234/my-dir/ImageLink.svg" rel="nofollow">The Image Desc</a></p>
`, rendered)
})
diff --git a/models/renderhelper/repo_wiki.go b/models/renderhelper/repo_wiki.go
index aa456bf6ce..b75f1b9701 100644
--- a/models/renderhelper/repo_wiki.go
+++ b/models/renderhelper/repo_wiki.go
@@ -30,18 +30,16 @@ func (r *RepoWiki) IsCommitIDExisting(commitID string) bool {
return r.commitChecker.IsCommitIDExisting(commitID)
}
-func (r *RepoWiki) ResolveLink(link string, likeType markup.LinkType) string {
- finalLink := link
- switch likeType {
- case markup.LinkTypeApp:
- finalLink = r.ctx.ResolveLinkApp(link)
- case markup.LinkTypeDefault:
- finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link)
- case markup.LinkTypeMedia:
+func (r *RepoWiki) ResolveLink(link, preferLinkType string) (finalLink string) {
+ linkType, link := markup.ParseRenderedLink(link, preferLinkType)
+ switch linkType {
+ case markup.LinkTypeRoot:
+ finalLink = r.ctx.ResolveLinkRoot(link)
+ case markup.LinkTypeMedia, markup.LinkTypeRaw:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefPath), r.opts.currentTreePath, link)
- case markup.LinkTypeRaw: // wiki doesn't use it
+ default:
+ finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link)
}
-
return finalLink
}
@@ -70,7 +68,6 @@ func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository,
"user": helper.opts.DeprecatedOwnerName,
"repo": helper.opts.DeprecatedRepoName,
- "markdownLineBreakStyle": "document",
"markupAllowShortIssuePattern": "true",
})
}
diff --git a/models/renderhelper/repo_wiki_test.go b/models/renderhelper/repo_wiki_test.go
index b24508f1f2..4f6da541a5 100644
--- a/models/renderhelper/repo_wiki_test.go
+++ b/models/renderhelper/repo_wiki_test.go
@@ -45,8 +45,8 @@ func TestRepoWiki(t *testing.T) {
assert.Equal(t,
`<p><a href="/user2/repo1/wiki/test" rel="nofollow">/test</a>
<a href="/user2/repo1/wiki/test" rel="nofollow">./test</a>
-<a href="/user2/repo1/wiki/raw/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="/image"/></a>
-<a href="/user2/repo1/wiki/raw/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="./image"/></a></p>
+<a href="/user2/repo1/wiki/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="/image"/></a>
+<a href="/user2/repo1/wiki/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="./image"/></a></p>
`, rendered)
})
@@ -57,7 +57,7 @@ func TestRepoWiki(t *testing.T) {
<video src="LINK">
`)
assert.NoError(t, err)
- assert.Equal(t, `<a href="/user2/repo1/wiki/raw/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/LINK"/></a>
+ assert.Equal(t, `<a href="/user2/repo1/wiki/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/LINK"/></a>
<video src="/user2/repo1/wiki/raw/LINK">
</video>`, rendered)
})
diff --git a/models/renderhelper/simple_document.go b/models/renderhelper/simple_document.go
index 91d888aa87..9b3dacaea3 100644
--- a/models/renderhelper/simple_document.go
+++ b/models/renderhelper/simple_document.go
@@ -15,8 +15,14 @@ type SimpleDocument struct {
baseLink string
}
-func (r *SimpleDocument) ResolveLink(link string, likeType markup.LinkType) string {
- return r.ctx.ResolveLinkRelative(r.baseLink, "", link)
+func (r *SimpleDocument) ResolveLink(link, preferLinkType string) string {
+ linkType, link := markup.ParseRenderedLink(link, preferLinkType)
+ switch linkType {
+ case markup.LinkTypeRoot:
+ return r.ctx.ResolveLinkRoot(link)
+ default:
+ return r.ctx.ResolveLinkRelative(r.baseLink, "", link)
+ }
}
var _ markup.RenderHelper = (*SimpleDocument)(nil)
diff --git a/models/renderhelper/simple_document_test.go b/models/renderhelper/simple_document_test.go
index 908e640f9c..890592860a 100644
--- a/models/renderhelper/simple_document_test.go
+++ b/models/renderhelper/simple_document_test.go
@@ -30,7 +30,7 @@ func TestSimpleDocument(t *testing.T) {
assert.Equal(t,
`<p>65f1bf27bc3bf70f64657658635e66094edbcb4d
#1
-<a href="/base/user2" rel="nofollow">@user2</a></p>
+<a href="/user2" rel="nofollow">@user2</a></p>
<p><a href="/base/test" rel="nofollow">/test</a>
<a href="/base/test" rel="nofollow">./test</a>
<a href="/base/image" target="_blank" rel="nofollow noopener"><img src="/base/image" alt="/image"/></a>
diff --git a/models/repo.go b/models/repo.go
index 9bc67079a9..522debb9fe 100644
--- a/models/repo.go
+++ b/models/repo.go
@@ -290,19 +290,14 @@ func UpdateRepoStats(ctx context.Context, id int64) error {
}
func updateUserStarNumbers(ctx context.Context, users []user_model.User) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- for _, user := range users {
- if _, err = db.Exec(ctx, "UPDATE `user` SET num_stars=(SELECT COUNT(*) FROM `star` WHERE uid=?) WHERE id=?", user.ID, user.ID); err != nil {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, user := range users {
+ if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars=(SELECT COUNT(*) FROM `star` WHERE uid=?) WHERE id=?", user.ID, user.ID); err != nil {
+ return err
+ }
}
- }
-
- return committer.Commit()
+ return nil
+ })
}
// DoctorUserStarNum recalculates the star count for all users
diff --git a/models/repo/attachment.go b/models/repo/attachment.go
index fa4f6c47e6..835bee5402 100644
--- a/models/repo/attachment.go
+++ b/models/repo/attachment.go
@@ -224,7 +224,7 @@ func DeleteAttachmentsByComment(ctx context.Context, commentID int64, remove boo
// UpdateAttachmentByUUID Updates attachment via uuid
func UpdateAttachmentByUUID(ctx context.Context, attach *Attachment, cols ...string) error {
if attach.UUID == "" {
- return fmt.Errorf("attachment uuid should be not blank")
+ return errors.New("attachment uuid should be not blank")
}
_, err := db.GetEngine(ctx).Where("uuid=?", attach.UUID).Cols(cols...).Update(attach)
return err
diff --git a/models/repo/avatar.go b/models/repo/avatar.go
index ccfac12cad..eff64bd239 100644
--- a/models/repo/avatar.go
+++ b/models/repo/avatar.go
@@ -9,6 +9,7 @@ import (
"image/png"
"io"
"net/url"
+ "strconv"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/avatar"
@@ -37,7 +38,7 @@ func (repo *Repository) RelAvatarLink(ctx context.Context) string {
// generateRandomAvatar generates a random avatar for repository.
func generateRandomAvatar(ctx context.Context, repo *Repository) error {
- idToString := fmt.Sprintf("%d", repo.ID)
+ idToString := strconv.FormatInt(repo.ID, 10)
seed := idToString
img, err := avatar.RandomImage([]byte(seed))
diff --git a/models/repo/collaboration_test.go b/models/repo/collaboration_test.go
index 639050f5fd..7b07dbffdf 100644
--- a/models/repo/collaboration_test.go
+++ b/models/repo/collaboration_test.go
@@ -25,8 +25,8 @@ func TestRepository_GetCollaborators(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, collaborators, int(expectedLen))
for _, collaborator := range collaborators {
- assert.EqualValues(t, collaborator.User.ID, collaborator.Collaboration.UserID)
- assert.EqualValues(t, repoID, collaborator.Collaboration.RepoID)
+ assert.Equal(t, collaborator.User.ID, collaborator.Collaboration.UserID)
+ assert.Equal(t, repoID, collaborator.Collaboration.RepoID)
}
}
test(1)
@@ -51,7 +51,7 @@ func TestRepository_GetCollaborators(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, collaborators2, 1)
- assert.NotEqualValues(t, collaborators1[0].ID, collaborators2[0].ID)
+ assert.NotEqual(t, collaborators1[0].ID, collaborators2[0].ID)
}
func TestRepository_IsCollaborator(t *testing.T) {
@@ -76,10 +76,10 @@ func TestRepository_ChangeCollaborationAccessMode(t *testing.T) {
assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessModeAdmin))
collaboration := unittest.AssertExistsAndLoadBean(t, &repo_model.Collaboration{RepoID: repo.ID, UserID: 4})
- assert.EqualValues(t, perm.AccessModeAdmin, collaboration.Mode)
+ assert.Equal(t, perm.AccessModeAdmin, collaboration.Mode)
access := unittest.AssertExistsAndLoadBean(t, &access_model.Access{UserID: 4, RepoID: repo.ID})
- assert.EqualValues(t, perm.AccessModeAdmin, access.Mode)
+ assert.Equal(t, perm.AccessModeAdmin, access.Mode)
assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessModeAdmin))
diff --git a/models/repo/language_stats.go b/models/repo/language_stats.go
index 0bc0f1fb40..1cddd25f1d 100644
--- a/models/repo/language_stats.go
+++ b/models/repo/language_stats.go
@@ -141,102 +141,90 @@ func GetTopLanguageStats(ctx context.Context, repo *Repository, limit int) (Lang
// UpdateLanguageStats updates the language statistics for a repository
func UpdateLanguageStats(ctx context.Context, repo *Repository, commitID string, stats map[string]int64) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ sess := db.GetEngine(ctx)
- oldstats, err := GetLanguageStats(ctx, repo)
- if err != nil {
- return err
- }
- var topLang string
- var s int64
- for lang, size := range stats {
- if size > s {
- s = size
- topLang = strings.ToLower(lang)
+ oldstats, err := GetLanguageStats(ctx, repo)
+ if err != nil {
+ return err
+ }
+ var topLang string
+ var s int64
+ for lang, size := range stats {
+ if size > s {
+ s = size
+ topLang = lang
+ }
}
- }
- for lang, size := range stats {
- upd := false
- llang := strings.ToLower(lang)
- for _, s := range oldstats {
- // Update already existing language
- if strings.ToLower(s.Language) == llang {
- s.CommitID = commitID
- s.IsPrimary = llang == topLang
- s.Size = size
- if _, err := sess.ID(s.ID).Cols("`commit_id`", "`size`", "`is_primary`").Update(s); err != nil {
+ for lang, size := range stats {
+ upd := false
+ for _, s := range oldstats {
+ // Update already existing language
+ if strings.EqualFold(s.Language, lang) {
+ s.CommitID = commitID
+ s.IsPrimary = lang == topLang
+ s.Size = size
+ if _, err := sess.ID(s.ID).Cols("`commit_id`", "`size`", "`is_primary`").Update(s); err != nil {
+ return err
+ }
+ upd = true
+ break
+ }
+ }
+ // Insert new language
+ if !upd {
+ if err := db.Insert(ctx, &LanguageStat{
+ RepoID: repo.ID,
+ CommitID: commitID,
+ IsPrimary: lang == topLang,
+ Language: lang,
+ Size: size,
+ }); err != nil {
return err
}
- upd = true
- break
}
}
- // Insert new language
- if !upd {
- if err := db.Insert(ctx, &LanguageStat{
- RepoID: repo.ID,
- CommitID: commitID,
- IsPrimary: llang == topLang,
- Language: lang,
- Size: size,
- }); err != nil {
- return err
+ // Delete old languages
+ statsToDelete := make([]int64, 0, len(oldstats))
+ for _, s := range oldstats {
+ if s.CommitID != commitID {
+ statsToDelete = append(statsToDelete, s.ID)
}
}
- }
- // Delete old languages
- statsToDelete := make([]int64, 0, len(oldstats))
- for _, s := range oldstats {
- if s.CommitID != commitID {
- statsToDelete = append(statsToDelete, s.ID)
- }
- }
- if len(statsToDelete) > 0 {
- if _, err := sess.In("`id`", statsToDelete).Delete(&LanguageStat{}); err != nil {
- return err
+ if len(statsToDelete) > 0 {
+ if _, err := sess.In("`id`", statsToDelete).Delete(&LanguageStat{}); err != nil {
+ return err
+ }
}
- }
- // Update indexer status
- if err = UpdateIndexerStatus(ctx, repo, RepoIndexerTypeStats, commitID); err != nil {
- return err
- }
-
- return committer.Commit()
+ // Update indexer status
+ return UpdateIndexerStatus(ctx, repo, RepoIndexerTypeStats, commitID)
+ })
}
// CopyLanguageStat copies originalRepo's language stat information to destRepo (used for forked repos)
func CopyLanguageStat(ctx context.Context, originalRepo, destRepo *Repository) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- RepoLang := make(LanguageStatList, 0, 6)
- if err := db.GetEngine(ctx).Where("`repo_id` = ?", originalRepo.ID).Desc("`size`").Find(&RepoLang); err != nil {
- return err
- }
- if len(RepoLang) > 0 {
- for i := range RepoLang {
- RepoLang[i].ID = 0
- RepoLang[i].RepoID = destRepo.ID
- RepoLang[i].CreatedUnix = timeutil.TimeStampNow()
- }
- // update destRepo's indexer status
- tmpCommitID := RepoLang[0].CommitID
- if err := UpdateIndexerStatus(ctx, destRepo, RepoIndexerTypeStats, tmpCommitID); err != nil {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ RepoLang := make(LanguageStatList, 0, 6)
+ if err := db.GetEngine(ctx).Where("`repo_id` = ?", originalRepo.ID).Desc("`size`").Find(&RepoLang); err != nil {
return err
}
- if err := db.Insert(ctx, &RepoLang); err != nil {
- return err
+ if len(RepoLang) > 0 {
+ for i := range RepoLang {
+ RepoLang[i].ID = 0
+ RepoLang[i].RepoID = destRepo.ID
+ RepoLang[i].CreatedUnix = timeutil.TimeStampNow()
+ }
+ // update destRepo's indexer status
+ tmpCommitID := RepoLang[0].CommitID
+ if err := UpdateIndexerStatus(ctx, destRepo, RepoIndexerTypeStats, tmpCommitID); err != nil {
+ return err
+ }
+ if err := db.Insert(ctx, &RepoLang); err != nil {
+ return err
+ }
}
- }
- return committer.Commit()
+ return nil
+ })
}
diff --git a/models/repo/org_repo.go b/models/repo/org_repo.go
index fa519d25b1..96f21ba2ac 100644
--- a/models/repo/org_repo.go
+++ b/models/repo/org_repo.go
@@ -48,8 +48,7 @@ func GetTeamRepositories(ctx context.Context, opts *SearchTeamRepoOptions) (Repo
// accessible to a particular user
type AccessibleReposEnvironment interface {
CountRepos(ctx context.Context) (int64, error)
- RepoIDs(ctx context.Context, page, pageSize int) ([]int64, error)
- Repos(ctx context.Context, page, pageSize int) (RepositoryList, error)
+ RepoIDs(ctx context.Context) ([]int64, error)
MirrorRepos(ctx context.Context) (RepositoryList, error)
AddKeyword(keyword string)
SetSort(db.SearchOrderBy)
@@ -132,40 +131,18 @@ func (env *accessibleReposEnv) CountRepos(ctx context.Context) (int64, error) {
return repoCount, nil
}
-func (env *accessibleReposEnv) RepoIDs(ctx context.Context, page, pageSize int) ([]int64, error) {
- if page <= 0 {
- page = 1
- }
-
- repoIDs := make([]int64, 0, pageSize)
+func (env *accessibleReposEnv) RepoIDs(ctx context.Context) ([]int64, error) {
+ var repoIDs []int64
return repoIDs, db.GetEngine(ctx).
Table("repository").
Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id").
Where(env.cond()).
- GroupBy("`repository`.id,`repository`."+strings.Fields(string(env.orderBy))[0]).
+ GroupBy("`repository`.id,`repository`." + strings.Fields(string(env.orderBy))[0]).
OrderBy(string(env.orderBy)).
- Limit(pageSize, (page-1)*pageSize).
Cols("`repository`.id").
Find(&repoIDs)
}
-func (env *accessibleReposEnv) Repos(ctx context.Context, page, pageSize int) (RepositoryList, error) {
- repoIDs, err := env.RepoIDs(ctx, page, pageSize)
- if err != nil {
- return nil, fmt.Errorf("GetUserRepositoryIDs: %w", err)
- }
-
- repos := make([]*Repository, 0, len(repoIDs))
- if len(repoIDs) == 0 {
- return repos, nil
- }
-
- return repos, db.GetEngine(ctx).
- In("`repository`.id", repoIDs).
- OrderBy(string(env.orderBy)).
- Find(&repos)
-}
-
func (env *accessibleReposEnv) MirrorRepoIDs(ctx context.Context) ([]int64, error) {
repoIDs := make([]int64, 0, 10)
return repoIDs, db.GetEngine(ctx).
diff --git a/models/repo/pushmirror_test.go b/models/repo/pushmirror_test.go
index e19749d93a..9fb7471147 100644
--- a/models/repo/pushmirror_test.go
+++ b/models/repo/pushmirror_test.go
@@ -39,8 +39,6 @@ func TestPushMirrorsIterate(t *testing.T) {
Interval: 0,
})
- time.Sleep(1 * time.Millisecond)
-
repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean any) error {
m, ok := bean.(*repo_model.PushMirror)
assert.True(t, ok)
diff --git a/models/repo/release.go b/models/repo/release.go
index 1c2e4a48e3..0db57503ce 100644
--- a/models/repo/release.go
+++ b/models/repo/release.go
@@ -161,6 +161,11 @@ func UpdateRelease(ctx context.Context, rel *Release) error {
return err
}
+func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error {
+ _, err := db.GetEngine(ctx).ID(rel.ID).Cols("num_commits").Update(rel)
+ return err
+}
+
// AddReleaseAttachments adds release attachments
func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs []string) (err error) {
// Check attachments
@@ -175,7 +180,7 @@ func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs
}
attachments[i].ReleaseID = releaseID
// No assign value could be 0, so ignore AllCols().
- if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil {
+ if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Cols("release_id").Update(attachments[i]); err != nil {
return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err)
}
}
@@ -418,8 +423,8 @@ func UpdateReleasesMigrationsByType(ctx context.Context, gitServiceType structs.
return err
}
-// PushUpdateDeleteTagsContext updates a number of delete tags with context
-func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []string) error {
+// PushUpdateDeleteTags updates a number of delete tags with context
+func PushUpdateDeleteTags(ctx context.Context, repo *Repository, tags []string) error {
if len(tags) == 0 {
return nil
}
@@ -448,58 +453,6 @@ func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []s
return nil
}
-// PushUpdateDeleteTag must be called for any push actions to delete tag
-func PushUpdateDeleteTag(ctx context.Context, repo *Repository, tagName string) error {
- rel, err := GetRelease(ctx, repo.ID, tagName)
- if err != nil {
- if IsErrReleaseNotExist(err) {
- return nil
- }
- return fmt.Errorf("GetRelease: %w", err)
- }
- if rel.IsTag {
- if _, err = db.DeleteByID[Release](ctx, rel.ID); err != nil {
- return fmt.Errorf("Delete: %w", err)
- }
- } else {
- rel.IsDraft = true
- rel.NumCommits = 0
- rel.Sha1 = ""
- if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil {
- return fmt.Errorf("Update: %w", err)
- }
- }
-
- return nil
-}
-
-// SaveOrUpdateTag must be called for any push actions to add tag
-func SaveOrUpdateTag(ctx context.Context, repo *Repository, newRel *Release) error {
- rel, err := GetRelease(ctx, repo.ID, newRel.TagName)
- if err != nil && !IsErrReleaseNotExist(err) {
- return fmt.Errorf("GetRelease: %w", err)
- }
-
- if rel == nil {
- rel = newRel
- if _, err = db.GetEngine(ctx).Insert(rel); err != nil {
- return fmt.Errorf("InsertOne: %w", err)
- }
- } else {
- rel.Sha1 = newRel.Sha1
- rel.CreatedUnix = newRel.CreatedUnix
- rel.NumCommits = newRel.NumCommits
- rel.IsDraft = false
- if rel.IsTag && newRel.PublisherID > 0 {
- rel.PublisherID = newRel.PublisherID
- }
- if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil {
- return fmt.Errorf("Update: %w", err)
- }
- }
- return nil
-}
-
// RemapExternalUser ExternalUserRemappable interface
func (r *Release) RemapExternalUser(externalName string, externalID, userID int64) error {
r.OriginalAuthor = externalName
@@ -519,30 +472,24 @@ func (r *Release) GetExternalID() int64 { return r.OriginalAuthorID }
// InsertReleases migrates releases
func InsertReleases(ctx context.Context, rels ...*Release) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
-
- for _, rel := range rels {
- if _, err := sess.NoAutoTime().Insert(rel); err != nil {
- return err
- }
-
- if len(rel.Attachments) > 0 {
- for i := range rel.Attachments {
- rel.Attachments[i].ReleaseID = rel.ID
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, rel := range rels {
+ if _, err := db.GetEngine(ctx).NoAutoTime().Insert(rel); err != nil {
+ return err
}
- if _, err := sess.NoAutoTime().Insert(rel.Attachments); err != nil {
- return err
+ if len(rel.Attachments) > 0 {
+ for i := range rel.Attachments {
+ rel.Attachments[i].ReleaseID = rel.ID
+ }
+
+ if _, err := db.GetEngine(ctx).NoAutoTime().Insert(rel.Attachments); err != nil {
+ return err
+ }
}
}
- }
-
- return committer.Commit()
+ return nil
+ })
}
func FindTagsByCommitIDs(ctx context.Context, repoID int64, commitIDs ...string) (map[string][]*Release, error) {
@@ -558,3 +505,8 @@ func FindTagsByCommitIDs(ctx context.Context, repoID int64, commitIDs ...string)
}
return res, nil
}
+
+func DeleteRepoReleases(ctx context.Context, repoID int64) error {
+ _, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Delete(new(Release))
+ return err
+}
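A minimal usage sketch of the helpers introduced above (db.WithTx, UpdateReleaseNumCommits, DeleteRepoReleases), assuming the usual Gitea import paths; the wrapper name cleanupRepoReleases is hypothetical:

package example // hypothetical caller package

import (
	"context"

	"code.gitea.io/gitea/models/db"
	repo_model "code.gitea.io/gitea/models/repo"
)

func cleanupRepoReleases(ctx context.Context, rel *repo_model.Release, repoID int64) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		// Only the num_commits column is written, mirroring UpdateReleaseNumCommits.
		if err := repo_model.UpdateReleaseNumCommits(ctx, rel); err != nil {
			return err
		}
		// Delete every release row of the repository in the same transaction.
		return repo_model.DeleteRepoReleases(ctx, repoID)
	})
}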
diff --git a/models/repo/repo.go b/models/repo/repo.go
index 13473699f3..2403b3b40b 100644
--- a/models/repo/repo.go
+++ b/models/repo/repo.go
@@ -5,6 +5,7 @@ package repo
import (
"context"
+ "errors"
"fmt"
"html/template"
"maps"
@@ -59,22 +60,22 @@ type ErrRepoIsArchived struct {
}
func (err ErrRepoIsArchived) Error() string {
- return fmt.Sprintf("%s is archived", err.Repo.LogString())
+ return err.Repo.LogString() + " is archived"
}
type globalVarsStruct struct {
- validRepoNamePattern *regexp.Regexp
- invalidRepoNamePattern *regexp.Regexp
- reservedRepoNames []string
- reservedRepoPatterns []string
+ validRepoNamePattern *regexp.Regexp
+ invalidRepoNamePattern *regexp.Regexp
+ reservedRepoNames []string
+ reservedRepoNamePatterns []string
}
var globalVars = sync.OnceValue(func() *globalVarsStruct {
return &globalVarsStruct{
- validRepoNamePattern: regexp.MustCompile(`[-.\w]+`),
- invalidRepoNamePattern: regexp.MustCompile(`[.]{2,}`),
- reservedRepoNames: []string{".", "..", "-"},
- reservedRepoPatterns: []string{"*.git", "*.wiki", "*.rss", "*.atom"},
+ validRepoNamePattern: regexp.MustCompile(`^[-.\w]+$`),
+ invalidRepoNamePattern: regexp.MustCompile(`[.]{2,}`),
+ reservedRepoNames: []string{".", "..", "-"},
+ reservedRepoNamePatterns: []string{"*.wiki", "*.git", "*.rss", "*.atom"},
}
})
@@ -85,7 +86,16 @@ func IsUsableRepoName(name string) error {
// Note: usually this error is caught earlier in the UI
return db.ErrNameCharsNotAllowed{Name: name}
}
- return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoPatterns, name)
+ return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoNamePatterns, name)
+}
+
+// IsValidSSHAccessRepoName is like IsUsableRepoName, but it allows "*.wiki" because the wiki repo needs to be accessible in the SSH code path
+func IsValidSSHAccessRepoName(name string) bool {
+ vars := globalVars()
+ if !vars.validRepoNamePattern.MatchString(name) || vars.invalidRepoNamePattern.MatchString(name) {
+ return false
+ }
+ return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoNamePatterns[1:], name) == nil
}
// TrustModelType defines the types of trust model for this repository
@@ -215,12 +225,24 @@ func init() {
db.RegisterModel(new(Repository))
}
-func (repo *Repository) GetName() string {
- return repo.Name
+func RelativePath(ownerName, repoName string) string {
+ return strings.ToLower(ownerName) + "/" + strings.ToLower(repoName) + ".git"
}
-func (repo *Repository) GetOwnerName() string {
- return repo.OwnerName
+// RelativePath should be a Unix-style path like username/reponame.git
+func (repo *Repository) RelativePath() string {
+ return RelativePath(repo.OwnerName, repo.Name)
+}
+
+type StorageRepo string
+
+// RelativePath should be a Unix-style path like username/reponame.git
+func (sr StorageRepo) RelativePath() string {
+ return string(sr)
+}
+
+func (repo *Repository) WikiStorageRepo() StorageRepo {
+ return StorageRepo(strings.ToLower(repo.OwnerName) + "/" + strings.ToLower(repo.Name) + ".wiki.git")
}
// SanitizedOriginalURL returns a sanitized OriginalURL
@@ -413,32 +435,33 @@ func (repo *Repository) MustGetUnit(ctx context.Context, tp unit.Type) *RepoUnit
return ru
}
- if tp == unit.TypeExternalWiki {
+ switch tp {
+ case unit.TypeExternalWiki:
return &RepoUnit{
Type: tp,
Config: new(ExternalWikiConfig),
}
- } else if tp == unit.TypeExternalTracker {
+ case unit.TypeExternalTracker:
return &RepoUnit{
Type: tp,
Config: new(ExternalTrackerConfig),
}
- } else if tp == unit.TypePullRequests {
+ case unit.TypePullRequests:
return &RepoUnit{
Type: tp,
Config: new(PullRequestsConfig),
}
- } else if tp == unit.TypeIssues {
+ case unit.TypeIssues:
return &RepoUnit{
Type: tp,
Config: new(IssuesConfig),
}
- } else if tp == unit.TypeActions {
+ case unit.TypeActions:
return &RepoUnit{
Type: tp,
Config: new(ActionsConfig),
}
- } else if tp == unit.TypeProjects {
+ case unit.TypeProjects:
cfg := new(ProjectsConfig)
cfg.ProjectsMode = ProjectsModeNone
return &RepoUnit{
@@ -498,15 +521,15 @@ func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]strin
"repo": repo.Name,
}
- unit, err := repo.GetUnit(ctx, unit.TypeExternalTracker)
+ unitExternalTracker, err := repo.GetUnit(ctx, unit.TypeExternalTracker)
if err == nil {
- metas["format"] = unit.ExternalTrackerConfig().ExternalTrackerFormat
- switch unit.ExternalTrackerConfig().ExternalTrackerStyle {
+ metas["format"] = unitExternalTracker.ExternalTrackerConfig().ExternalTrackerFormat
+ switch unitExternalTracker.ExternalTrackerConfig().ExternalTrackerStyle {
case markup.IssueNameStyleAlphanumeric:
metas["style"] = markup.IssueNameStyleAlphanumeric
case markup.IssueNameStyleRegexp:
metas["style"] = markup.IssueNameStyleRegexp
- metas["regexp"] = unit.ExternalTrackerConfig().ExternalTrackerRegexpPattern
+ metas["regexp"] = unitExternalTracker.ExternalTrackerConfig().ExternalTrackerRegexpPattern
default:
metas["style"] = markup.IssueNameStyleNumeric
}
@@ -530,11 +553,11 @@ func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]strin
return repo.commonRenderingMetas
}
-// ComposeMetas composes a map of metas for properly rendering comments or comment-like contents (commit message)
-func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
+// ComposeCommentMetas composes a map of metas for properly rendering comments or comment-like content (e.g. commit messages)
+func (repo *Repository) ComposeCommentMetas(ctx context.Context) map[string]string {
metas := maps.Clone(repo.composeCommonMetas(ctx))
- metas["markdownLineBreakStyle"] = "comment"
- metas["markupAllowShortIssuePattern"] = "true"
+ metas["markdownNewLineHardBreak"] = strconv.FormatBool(setting.Markdown.RenderOptionsComment.NewLineHardBreak)
+ metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsComment.ShortIssuePattern)
return metas
}
@@ -542,16 +565,17 @@ func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
func (repo *Repository) ComposeWikiMetas(ctx context.Context) map[string]string {
// does wiki need the "teams" and "org" from common metas?
metas := maps.Clone(repo.composeCommonMetas(ctx))
- metas["markdownLineBreakStyle"] = "document"
- metas["markupAllowShortIssuePattern"] = "true"
+ metas["markdownNewLineHardBreak"] = strconv.FormatBool(setting.Markdown.RenderOptionsWiki.NewLineHardBreak)
+ metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsWiki.ShortIssuePattern)
return metas
}
-// ComposeDocumentMetas composes a map of metas for properly rendering documents (repo files)
-func (repo *Repository) ComposeDocumentMetas(ctx context.Context) map[string]string {
+// ComposeRepoFileMetas composes a map of metas for properly rendering documents (repo files)
+func (repo *Repository) ComposeRepoFileMetas(ctx context.Context) map[string]string {
// does document(file) need the "teams" and "org" from common metas?
metas := maps.Clone(repo.composeCommonMetas(ctx))
- metas["markdownLineBreakStyle"] = "document"
+ metas["markdownNewLineHardBreak"] = strconv.FormatBool(setting.Markdown.RenderOptionsRepoFile.NewLineHardBreak)
+ metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsRepoFile.ShortIssuePattern)
return metas
}
@@ -628,8 +652,14 @@ func (repo *Repository) AllowsPulls(ctx context.Context) bool {
}
// CanEnableEditor returns true if the repository meets the requirements of the web editor.
+// FIXME: most CanEnableEditor calls should be replaced with CanContentChange
+// and other checks like CanCreateBranch / CanEnablePulls should also be updated
func (repo *Repository) CanEnableEditor() bool {
- return !repo.IsMirror
+ return repo.CanContentChange()
+}
+
+func (repo *Repository) CanContentChange() bool {
+ return !repo.IsMirror && !repo.IsArchived
}
// DescriptionHTML does special handles to description and return HTML string.
@@ -807,7 +837,7 @@ func GetRepositoryByName(ctx context.Context, ownerID int64, name string) (*Repo
func GetRepositoryByURL(ctx context.Context, repoURL string) (*Repository, error) {
ret, err := giturl.ParseRepositoryURL(ctx, repoURL)
if err != nil || ret.OwnerName == "" {
- return nil, fmt.Errorf("unknown or malformed repository URL")
+ return nil, errors.New("unknown or malformed repository URL")
}
return GetRepositoryByOwnerAndName(ctx, ret.OwnerName, ret.RepoName)
}
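A hedged usage sketch of the new name and path helpers from the hunks above; the sample names are illustrative only:

package main

import (
	"fmt"

	repo_model "code.gitea.io/gitea/models/repo"
)

func main() {
	// "foo.wiki" is rejected for normal repository names but accepted over SSH,
	// because the wiki repository is addressed as "<name>.wiki" there.
	fmt.Println(repo_model.IsUsableRepoName("foo.wiki") == nil)  // false
	fmt.Println(repo_model.IsValidSSHAccessRepoName("foo.wiki")) // true

	// RelativePath lower-cases both parts and appends ".git".
	fmt.Println(repo_model.RelativePath("Owner", "Repo")) // owner/repo.git
}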
diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go
index 02c228e8a0..f2cdd2f284 100644
--- a/models/repo/repo_list.go
+++ b/models/repo/repo_list.go
@@ -359,7 +359,7 @@ func UserOrgPublicUnitRepoCond(userID, orgID int64) builder.Cond {
}
// SearchRepositoryCondition creates a query condition according to the search repository options
-func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
+func SearchRepositoryCondition(opts SearchRepoOptions) builder.Cond {
cond := builder.NewCond()
if opts.Private {
@@ -449,7 +449,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
if opts.Keyword != "" {
// separate keyword
subQueryCond := builder.NewCond()
- for _, v := range strings.Split(opts.Keyword, ",") {
+ for v := range strings.SplitSeq(opts.Keyword, ",") {
if opts.TopicOnly {
subQueryCond = subQueryCond.Or(builder.Eq{"topic.name": strings.ToLower(v)})
} else {
@@ -464,7 +464,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
keywordCond := builder.In("id", subQuery)
if !opts.TopicOnly {
likes := builder.NewCond()
- for _, v := range strings.Split(opts.Keyword, ",") {
+ for v := range strings.SplitSeq(opts.Keyword, ",") {
likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)})
// If the string looks like "org/repo", match against that pattern too
@@ -551,18 +551,18 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
// SearchRepository returns repositories based on search options,
// it returns results in given range and number of total results.
-func SearchRepository(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) {
+func SearchRepository(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) {
cond := SearchRepositoryCondition(opts)
return SearchRepositoryByCondition(ctx, opts, cond, true)
}
// CountRepository counts repositories based on search options,
-func CountRepository(ctx context.Context, opts *SearchRepoOptions) (int64, error) {
+func CountRepository(ctx context.Context, opts SearchRepoOptions) (int64, error) {
return db.GetEngine(ctx).Where(SearchRepositoryCondition(opts)).Count(new(Repository))
}
// SearchRepositoryByCondition search repositories by condition
-func SearchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, cond builder.Cond, loadAttributes bool) (RepositoryList, int64, error) {
+func SearchRepositoryByCondition(ctx context.Context, opts SearchRepoOptions, cond builder.Cond, loadAttributes bool) (RepositoryList, int64, error) {
sess, count, err := searchRepositoryByCondition(ctx, opts, cond)
if err != nil {
return nil, 0, err
@@ -590,23 +590,25 @@ func SearchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c
return repos, count, nil
}
-func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, cond builder.Cond) (db.Engine, int64, error) {
- if opts.Page <= 0 {
- opts.Page = 1
+func searchRepositoryByCondition(ctx context.Context, opts SearchRepoOptions, cond builder.Cond) (db.Engine, int64, error) {
+ page := opts.Page
+ if page <= 0 {
+ page = 1
}
- if len(opts.OrderBy) == 0 {
- opts.OrderBy = db.SearchOrderByAlphabetically
+ orderBy := opts.OrderBy
+ if len(orderBy) == 0 {
+ orderBy = db.SearchOrderByAlphabetically
}
args := make([]any, 0)
if opts.PriorityOwnerID > 0 {
- opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", opts.OrderBy))
+ orderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", orderBy))
args = append(args, opts.PriorityOwnerID)
} else if strings.Count(opts.Keyword, "/") == 1 {
// With "owner/repo" search times, prioritise results which match the owner field
orgName := strings.Split(opts.Keyword, "/")[0]
- opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_name LIKE ? THEN 0 ELSE 1 END, %s", opts.OrderBy))
+ orderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_name LIKE ? THEN 0 ELSE 1 END, %s", orderBy))
args = append(args, orgName)
}
@@ -623,9 +625,9 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c
}
}
- sess = sess.Where(cond).OrderBy(opts.OrderBy.String(), args...)
+ sess = sess.Where(cond).OrderBy(orderBy.String(), args...)
if opts.PageSize > 0 {
- sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
+ sess = sess.Limit(opts.PageSize, (page-1)*opts.PageSize)
}
return sess, count, nil
}
@@ -689,14 +691,14 @@ func AccessibleRepositoryCondition(user *user_model.User, unitType unit.Type) bu
// SearchRepositoryByName takes keyword and part of repository name to search,
// it returns results in given range and number of total results.
-func SearchRepositoryByName(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) {
+func SearchRepositoryByName(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) {
opts.IncludeDescription = false
return SearchRepository(ctx, opts)
}
// SearchRepositoryIDs takes keyword and part of repository name to search,
// it returns results in given range and number of total results.
-func SearchRepositoryIDs(ctx context.Context, opts *SearchRepoOptions) ([]int64, int64, error) {
+func SearchRepositoryIDs(ctx context.Context, opts SearchRepoOptions) ([]int64, int64, error) {
opts.IncludeDescription = false
cond := SearchRepositoryCondition(opts)
@@ -740,7 +742,7 @@ func FindUserCodeAccessibleOwnerRepoIDs(ctx context.Context, ownerID int64, user
}
// GetUserRepositories returns a list of repositories of given user.
-func GetUserRepositories(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) {
+func GetUserRepositories(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) {
if len(opts.OrderBy) == 0 {
opts.OrderBy = "updated_unix DESC"
}
@@ -767,5 +769,5 @@ func GetUserRepositories(ctx context.Context, opts *SearchRepoOptions) (Reposito
sess = sess.Where(cond).OrderBy(opts.OrderBy.String())
repos := make(RepositoryList, 0, opts.PageSize)
- return repos, count, db.SetSessionPagination(sess, opts).Find(&repos)
+ return repos, count, db.SetSessionPagination(sess, &opts).Find(&repos)
}
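A minimal sketch of the new call shape now that SearchRepoOptions is passed by value instead of by pointer; the keyword and paging values are made up:

package example // hypothetical caller package

import (
	"context"

	"code.gitea.io/gitea/models/db"
	repo_model "code.gitea.io/gitea/models/repo"
)

func searchExample(ctx context.Context) (repo_model.RepositoryList, int64, error) {
	// Passing the options by value means the Page/OrderBy normalisation done
	// inside searchRepositoryByCondition is no longer visible to the caller.
	return repo_model.SearchRepository(ctx, repo_model.SearchRepoOptions{
		Keyword:     "big_test_",
		ListOptions: db.ListOptions{Page: 1, PageSize: 10},
		Private:     true,
	})
}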
diff --git a/models/repo/repo_list_test.go b/models/repo/repo_list_test.go
index ca6007f6c7..7eb76416c2 100644
--- a/models/repo/repo_list_test.go
+++ b/models/repo/repo_list_test.go
@@ -17,162 +17,162 @@ import (
func getTestCases() []struct {
name string
- opts *repo_model.SearchRepoOptions
+ opts repo_model.SearchRepoOptions
count int
} {
testCases := []struct {
name string
- opts *repo_model.SearchRepoOptions
+ opts repo_model.SearchRepoOptions
count int
}{
{
name: "PublicRepositoriesByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, Collaborate: optional.Some(false)},
count: 7,
},
{
name: "PublicAndPrivateRepositoriesByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitFirstPage",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitSecondPage",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 2, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 2, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitThirdPage",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitFourthPage",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "PublicRepositoriesOfUser",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Collaborate: optional.Some(false)},
count: 2,
},
{
name: "PublicRepositoriesOfUser2",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Collaborate: optional.Some(false)},
count: 0,
},
{
name: "PublicRepositoriesOfOrg3",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Collaborate: optional.Some(false)},
count: 2,
},
{
name: "PublicAndPrivateRepositoriesOfUser",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, Collaborate: optional.Some(false)},
count: 4,
},
{
name: "PublicAndPrivateRepositoriesOfUser2",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, Collaborate: optional.Some(false)},
count: 0,
},
{
name: "PublicAndPrivateRepositoriesOfOrg3",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true, Collaborate: optional.Some(false)},
count: 4,
},
{
name: "PublicRepositoriesOfUserIncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15},
count: 5,
},
{
name: "PublicRepositoriesOfUser2IncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18},
count: 1,
},
{
name: "PublicRepositoriesOfOrg3IncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20},
count: 3,
},
{
name: "PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true},
count: 9,
},
{
name: "PublicAndPrivateRepositoriesOfUser2IncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true},
count: 4,
},
{
name: "PublicAndPrivateRepositoriesOfOrg3IncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true},
count: 7,
},
{
name: "PublicRepositoriesOfOrganization",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Collaborate: optional.Some(false)},
count: 1,
},
{
name: "PublicAndPrivateRepositoriesOfOrganization",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Private: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Private: true, Collaborate: optional.Some(false)},
count: 2,
},
{
name: "AllPublic/PublicRepositoriesByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, AllPublic: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, AllPublic: true, Collaborate: optional.Some(false)},
count: 7,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, AllPublic: true, Collaborate: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, AllPublic: true, Collaborate: optional.Some(false)},
count: 14,
},
{
name: "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: optional.Some(false)},
count: 34,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: optional.Some(false)},
count: 39,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true},
+ opts: repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true},
count: 15,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesOfUser2IncludingCollaborativeByName",
- opts: &repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, AllPublic: true},
+ opts: repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, AllPublic: true},
count: 13,
},
{
name: "AllPublic/PublicRepositoriesOfOrganization",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: optional.Some(false), Template: optional.Some(false)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: optional.Some(false), Template: optional.Some(false)},
count: 34,
},
{
name: "AllTemplates",
- opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Template: optional.Some(true)},
+ opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Template: optional.Some(true)},
count: 2,
},
{
name: "OwnerSlashRepoSearch",
- opts: &repo_model.SearchRepoOptions{Keyword: "user/repo2", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0},
+ opts: repo_model.SearchRepoOptions{Keyword: "user/repo2", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0},
count: 2,
},
{
name: "OwnerSlashSearch",
- opts: &repo_model.SearchRepoOptions{Keyword: "user20/", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0},
+ opts: repo_model.SearchRepoOptions{Keyword: "user20/", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0},
count: 4,
},
}
@@ -184,7 +184,7 @@ func TestSearchRepository(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
// test search public repository on explore page
- repos, count, err := repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err := repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -199,7 +199,7 @@ func TestSearchRepository(t *testing.T) {
}
assert.Equal(t, int64(1), count)
- repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -213,7 +213,7 @@ func TestSearchRepository(t *testing.T) {
assert.Len(t, repos, 2)
// test search private repository on explore page
- repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -229,7 +229,7 @@ func TestSearchRepository(t *testing.T) {
}
assert.Equal(t, int64(1), count)
- repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -244,14 +244,14 @@ func TestSearchRepository(t *testing.T) {
assert.Len(t, repos, 3)
// Test non-existent owner
- repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID})
+ repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID})
assert.NoError(t, err)
assert.Empty(t, repos)
assert.Equal(t, int64(0), count)
// Test search within description
- repos, count, err = repo_model.SearchRepository(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err = repo_model.SearchRepository(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -268,7 +268,7 @@ func TestSearchRepository(t *testing.T) {
assert.Equal(t, int64(1), count)
// Test NOT search within description
- repos, count, err = repo_model.SearchRepository(db.DefaultContext, &repo_model.SearchRepoOptions{
+ repos, count, err = repo_model.SearchRepository(db.DefaultContext, repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -374,22 +374,22 @@ func TestSearchRepositoryByTopicName(t *testing.T) {
testCases := []struct {
name string
- opts *repo_model.SearchRepoOptions
+ opts repo_model.SearchRepoOptions
count int
}{
{
name: "AllPublic/SearchPublicRepositoriesFromTopicAndName",
- opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql"},
+ opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql"},
count: 2,
},
{
name: "AllPublic/OnlySearchPublicRepositoriesFromTopic",
- opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql", TopicOnly: true},
+ opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql", TopicOnly: true},
count: 1,
},
{
name: "AllPublic/OnlySearchMultipleKeywordPublicRepositoriesFromTopic",
- opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql,golang", TopicOnly: true},
+ opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql,golang", TopicOnly: true},
count: 2,
},
}
diff --git a/models/repo/repo_test.go b/models/repo/repo_test.go
index dffbd18261..66abe864fc 100644
--- a/models/repo/repo_test.go
+++ b/models/repo/repo_test.go
@@ -86,7 +86,7 @@ func TestMetas(t *testing.T) {
repo.Units = nil
- metas := repo.ComposeMetas(db.DefaultContext)
+ metas := repo.ComposeCommentMetas(db.DefaultContext)
assert.Equal(t, "testRepo", metas["repo"])
assert.Equal(t, "testOwner", metas["user"])
@@ -100,7 +100,7 @@ func TestMetas(t *testing.T) {
testSuccess := func(expectedStyle string) {
repo.Units = []*RepoUnit{&externalTracker}
repo.commonRenderingMetas = nil
- metas := repo.ComposeMetas(db.DefaultContext)
+ metas := repo.ComposeCommentMetas(db.DefaultContext)
assert.Equal(t, expectedStyle, metas["style"])
assert.Equal(t, "testRepo", metas["repo"])
assert.Equal(t, "testOwner", metas["user"])
@@ -121,7 +121,7 @@ func TestMetas(t *testing.T) {
repo, err := GetRepositoryByID(db.DefaultContext, 3)
assert.NoError(t, err)
- metas = repo.ComposeMetas(db.DefaultContext)
+ metas = repo.ComposeCommentMetas(db.DefaultContext)
assert.Contains(t, metas, "org")
assert.Contains(t, metas, "teams")
assert.Equal(t, "org3", metas["org"])
@@ -216,8 +216,23 @@ func TestIsUsableRepoName(t *testing.T) {
assert.Error(t, IsUsableRepoName("-"))
assert.Error(t, IsUsableRepoName("🌞"))
+ assert.Error(t, IsUsableRepoName("the/repo"))
assert.Error(t, IsUsableRepoName("the..repo"))
assert.Error(t, IsUsableRepoName("foo.wiki"))
assert.Error(t, IsUsableRepoName("foo.git"))
assert.Error(t, IsUsableRepoName("foo.RSS"))
}
+
+func TestIsValidSSHAccessRepoName(t *testing.T) {
+ assert.True(t, IsValidSSHAccessRepoName("a"))
+ assert.True(t, IsValidSSHAccessRepoName("-1_."))
+ assert.True(t, IsValidSSHAccessRepoName(".profile"))
+ assert.True(t, IsValidSSHAccessRepoName("foo.wiki"))
+
+ assert.False(t, IsValidSSHAccessRepoName("-"))
+ assert.False(t, IsValidSSHAccessRepoName("🌞"))
+ assert.False(t, IsValidSSHAccessRepoName("the/repo"))
+ assert.False(t, IsValidSSHAccessRepoName("the..repo"))
+ assert.False(t, IsValidSSHAccessRepoName("foo.git"))
+ assert.False(t, IsValidSSHAccessRepoName("foo.RSS"))
+}
diff --git a/models/repo/repo_unit.go b/models/repo/repo_unit.go
index cb52c2c9e2..a5207bc22a 100644
--- a/models/repo/repo_unit.go
+++ b/models/repo/repo_unit.go
@@ -5,7 +5,6 @@ package repo
import (
"context"
- "fmt"
"slices"
"strings"
@@ -33,7 +32,7 @@ func IsErrUnitTypeNotExist(err error) bool {
}
func (err ErrUnitTypeNotExist) Error() string {
- return fmt.Sprintf("Unit type does not exist: %s", err.UT.LogString())
+ return "Unit type does not exist: " + err.UT.LogString()
}
func (err ErrUnitTypeNotExist) Unwrap() error {
@@ -42,12 +41,13 @@ func (err ErrUnitTypeNotExist) Unwrap() error {
// RepoUnit describes all units of a repository
type RepoUnit struct { //revive:disable-line:exported
- ID int64
- RepoID int64 `xorm:"INDEX(s)"`
- Type unit.Type `xorm:"INDEX(s)"`
- Config convert.Conversion `xorm:"TEXT"`
- CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"`
- EveryoneAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"`
+ ID int64
+ RepoID int64 `xorm:"INDEX(s)"`
+ Type unit.Type `xorm:"INDEX(s)"`
+ Config convert.Conversion `xorm:"TEXT"`
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"`
+ AnonymousAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"`
+ EveryoneAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"`
}
func init() {
@@ -185,10 +185,8 @@ func (cfg *ActionsConfig) IsWorkflowDisabled(file string) bool {
}
func (cfg *ActionsConfig) DisableWorkflow(file string) {
- for _, workflow := range cfg.DisabledWorkflows {
- if file == workflow {
- return
- }
+ if slices.Contains(cfg.DisabledWorkflows, file) {
+ return
}
cfg.DisabledWorkflows = append(cfg.DisabledWorkflows, file)
@@ -341,3 +339,9 @@ func UpdateRepoUnit(ctx context.Context, unit *RepoUnit) error {
_, err := db.GetEngine(ctx).ID(unit.ID).Update(unit)
return err
}
+
+func UpdateRepoUnitPublicAccess(ctx context.Context, unit *RepoUnit) error {
+ _, err := db.GetEngine(ctx).Where("repo_id=? AND `type`=?", unit.RepoID, unit.Type).
+ Cols("anonymous_access_mode", "everyone_access_mode").Update(unit)
+ return err
+}
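A hedged sketch of the new anonymous/everyone access columns and UpdateRepoUnitPublicAccess; perm.AccessModeRead is assumed to exist as in the rest of the models package, and the wrapper name is illustrative:

package example // hypothetical caller package

import (
	"context"

	"code.gitea.io/gitea/models/perm"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/unit"
)

func makeCodeUnitPublic(ctx context.Context, repoID int64) error {
	u := &repo_model.RepoUnit{
		RepoID:              repoID,
		Type:                unit.TypeCode,
		AnonymousAccessMode: perm.AccessModeRead, // assumed constant
		EveryoneAccessMode:  perm.AccessModeRead, // assumed constant
	}
	// Only anonymous_access_mode and everyone_access_mode are written,
	// matching the Cols(...) call in UpdateRepoUnitPublicAccess.
	return repo_model.UpdateRepoUnitPublicAccess(ctx, u)
}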
diff --git a/models/repo/repo_unit_test.go b/models/repo/repo_unit_test.go
index a760594013..56dda5672d 100644
--- a/models/repo/repo_unit_test.go
+++ b/models/repo/repo_unit_test.go
@@ -12,19 +12,19 @@ import (
func TestActionsConfig(t *testing.T) {
cfg := &ActionsConfig{}
cfg.DisableWorkflow("test1.yaml")
- assert.EqualValues(t, []string{"test1.yaml"}, cfg.DisabledWorkflows)
+ assert.Equal(t, []string{"test1.yaml"}, cfg.DisabledWorkflows)
cfg.DisableWorkflow("test1.yaml")
- assert.EqualValues(t, []string{"test1.yaml"}, cfg.DisabledWorkflows)
+ assert.Equal(t, []string{"test1.yaml"}, cfg.DisabledWorkflows)
cfg.EnableWorkflow("test1.yaml")
- assert.EqualValues(t, []string{}, cfg.DisabledWorkflows)
+ assert.Equal(t, []string{}, cfg.DisabledWorkflows)
cfg.EnableWorkflow("test1.yaml")
- assert.EqualValues(t, []string{}, cfg.DisabledWorkflows)
+ assert.Equal(t, []string{}, cfg.DisabledWorkflows)
cfg.DisableWorkflow("test1.yaml")
cfg.DisableWorkflow("test2.yaml")
cfg.DisableWorkflow("test3.yaml")
- assert.EqualValues(t, "test1.yaml,test2.yaml,test3.yaml", cfg.ToString())
+ assert.Equal(t, "test1.yaml,test2.yaml,test3.yaml", cfg.ToString())
}
diff --git a/models/repo/star.go b/models/repo/star.go
index 4c66855525..bc865f8373 100644
--- a/models/repo/star.go
+++ b/models/repo/star.go
@@ -25,48 +25,45 @@ func init() {
// StarRepo or unstar repository.
func StarRepo(ctx context.Context, doer *user_model.User, repo *Repository, star bool) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- staring := IsStaring(ctx, doer.ID, repo.ID)
-
- if star {
- if user_model.IsUserBlockedBy(ctx, doer, repo.OwnerID) {
- return user_model.ErrBlockedUser
- }
-
- if staring {
- return nil
- }
-
- if err := db.Insert(ctx, &Star{UID: doer.ID, RepoID: repo.ID}); err != nil {
- return err
- }
- if _, err := db.Exec(ctx, "UPDATE `repository` SET num_stars = num_stars + 1 WHERE id = ?", repo.ID); err != nil {
- return err
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ staring := IsStaring(ctx, doer.ID, repo.ID)
+
+ if star {
+ if user_model.IsUserBlockedBy(ctx, doer, repo.OwnerID) {
+ return user_model.ErrBlockedUser
+ }
+
+ if staring {
+ return nil
+ }
+
+ if err := db.Insert(ctx, &Star{UID: doer.ID, RepoID: repo.ID}); err != nil {
+ return err
+ }
+ if _, err := db.Exec(ctx, "UPDATE `repository` SET num_stars = num_stars + 1 WHERE id = ?", repo.ID); err != nil {
+ return err
+ }
+ if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars = num_stars + 1 WHERE id = ?", doer.ID); err != nil {
+ return err
+ }
+ } else {
+ if !staring {
+ return nil
+ }
+
+ if _, err := db.DeleteByBean(ctx, &Star{UID: doer.ID, RepoID: repo.ID}); err != nil {
+ return err
+ }
+ if _, err := db.Exec(ctx, "UPDATE `repository` SET num_stars = num_stars - 1 WHERE id = ?", repo.ID); err != nil {
+ return err
+ }
+ if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars = num_stars - 1 WHERE id = ?", doer.ID); err != nil {
+ return err
+ }
}
- if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars = num_stars + 1 WHERE id = ?", doer.ID); err != nil {
- return err
- }
- } else {
- if !staring {
- return nil
- }
-
- if _, err := db.DeleteByBean(ctx, &Star{UID: doer.ID, RepoID: repo.ID}); err != nil {
- return err
- }
- if _, err := db.Exec(ctx, "UPDATE `repository` SET num_stars = num_stars - 1 WHERE id = ?", repo.ID); err != nil {
- return err
- }
- if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars = num_stars - 1 WHERE id = ?", doer.ID); err != nil {
- return err
- }
- }
- return committer.Commit()
+ return nil
+ })
}
// IsStaring checks if user has starred given repository.
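A short usage note, as a sketch: StarRepo keeps its signature, only the transaction handling moved into db.WithTx, so existing callers stay unchanged; the wrapper below is hypothetical:

package example // hypothetical caller package

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

func toggleStar(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, star bool) error {
	// The Star row insert/delete and both counter updates now run inside one
	// implicit transaction; returning an error rolls everything back.
	return repo_model.StarRepo(ctx, doer, repo, star)
}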
diff --git a/models/repo/topic.go b/models/repo/topic.go
index 430a60f603..baeae01efa 100644
--- a/models/repo/topic.go
+++ b/models/repo/topic.go
@@ -227,32 +227,26 @@ func GetRepoTopicByName(ctx context.Context, repoID int64, topicName string) (*T
// AddTopic adds a topic name to a repository (if it does not already have it)
func AddTopic(ctx context.Context, repoID int64, topicName string) (*Topic, error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return nil, err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
-
- topic, err := GetRepoTopicByName(ctx, repoID, topicName)
- if err != nil {
- return nil, err
- }
- if topic != nil {
- // Repo already have topic
- return topic, nil
- }
-
- topic, err = addTopicByNameToRepo(ctx, repoID, topicName)
- if err != nil {
- return nil, err
- }
+ return db.WithTx2(ctx, func(ctx context.Context) (*Topic, error) {
+ topic, err := GetRepoTopicByName(ctx, repoID, topicName)
+ if err != nil {
+ return nil, err
+ }
+ if topic != nil {
+ // Repo already has the topic
+ return topic, nil
+ }
- if err = syncTopicsInRepository(sess, repoID); err != nil {
- return nil, err
- }
+ topic, err = addTopicByNameToRepo(ctx, repoID, topicName)
+ if err != nil {
+ return nil, err
+ }
- return topic, committer.Commit()
+ if err = syncTopicsInRepository(ctx, repoID); err != nil {
+ return nil, err
+ }
+ return topic, nil
+ })
}
// DeleteTopic removes a topic name from a repository (if it has it)
@@ -266,14 +260,15 @@ func DeleteTopic(ctx context.Context, repoID int64, topicName string) (*Topic, e
return nil, nil
}
- err = removeTopicFromRepo(ctx, repoID, topic)
- if err != nil {
- return nil, err
- }
-
- err = syncTopicsInRepository(db.GetEngine(ctx), repoID)
-
- return topic, err
+ return db.WithTx2(ctx, func(ctx context.Context) (*Topic, error) {
+ if err = removeTopicFromRepo(ctx, repoID, topic); err != nil {
+ return nil, err
+ }
+ if err = syncTopicsInRepository(ctx, repoID); err != nil {
+ return nil, err
+ }
+ return topic, nil
+ })
}
// SaveTopics save topics to a repository
@@ -285,64 +280,55 @@ func SaveTopics(ctx context.Context, repoID int64, topicNames ...string) error {
return err
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ var addedTopicNames []string
+ for _, topicName := range topicNames {
+ if strings.TrimSpace(topicName) == "" {
+ continue
+ }
- var addedTopicNames []string
- for _, topicName := range topicNames {
- if strings.TrimSpace(topicName) == "" {
- continue
+ var found bool
+ for _, t := range topics {
+ if strings.EqualFold(topicName, t.Name) {
+ found = true
+ break
+ }
+ }
+ if !found {
+ addedTopicNames = append(addedTopicNames, topicName)
+ }
}
- var found bool
+ var removeTopics []*Topic
for _, t := range topics {
- if strings.EqualFold(topicName, t.Name) {
- found = true
- break
+ var found bool
+ for _, topicName := range topicNames {
+ if strings.EqualFold(topicName, t.Name) {
+ found = true
+ break
+ }
}
- }
- if !found {
- addedTopicNames = append(addedTopicNames, topicName)
- }
- }
-
- var removeTopics []*Topic
- for _, t := range topics {
- var found bool
- for _, topicName := range topicNames {
- if strings.EqualFold(topicName, t.Name) {
- found = true
- break
+ if !found {
+ removeTopics = append(removeTopics, t)
}
}
- if !found {
- removeTopics = append(removeTopics, t)
- }
- }
- for _, topicName := range addedTopicNames {
- _, err := addTopicByNameToRepo(ctx, repoID, topicName)
- if err != nil {
- return err
+ for _, topicName := range addedTopicNames {
+ _, err := addTopicByNameToRepo(ctx, repoID, topicName)
+ if err != nil {
+ return err
+ }
}
- }
- for _, topic := range removeTopics {
- err := removeTopicFromRepo(ctx, repoID, topic)
- if err != nil {
- return err
+ for _, topic := range removeTopics {
+ err := removeTopicFromRepo(ctx, repoID, topic)
+ if err != nil {
+ return err
+ }
}
- }
- if err := syncTopicsInRepository(sess, repoID); err != nil {
- return err
- }
-
- return committer.Commit()
+ return syncTopicsInRepository(ctx, repoID)
+ })
}
// GenerateTopics generates topics from a template repository
@@ -353,19 +339,19 @@ func GenerateTopics(ctx context.Context, templateRepo, generateRepo *Repository)
}
}
- return syncTopicsInRepository(db.GetEngine(ctx), generateRepo.ID)
+ return syncTopicsInRepository(ctx, generateRepo.ID)
}
// syncTopicsInRepository makes sure topics in the topics table are copied into the topics field of the repository
-func syncTopicsInRepository(sess db.Engine, repoID int64) error {
+func syncTopicsInRepository(ctx context.Context, repoID int64) error {
topicNames := make([]string, 0, 25)
- if err := sess.Table("topic").Cols("name").
+ if err := db.GetEngine(ctx).Table("topic").Cols("name").
Join("INNER", "repo_topic", "repo_topic.topic_id = topic.id").
Where("repo_topic.repo_id = ?", repoID).Asc("topic.name").Find(&topicNames); err != nil {
return err
}
- if _, err := sess.ID(repoID).Cols("topics").Update(&Repository{
+ if _, err := db.GetEngine(ctx).ID(repoID).Cols("topics").Update(&Repository{
Topics: topicNames,
}); err != nil {
return err
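A minimal sketch of the db.WithTx2 pattern adopted by AddTopic and DeleteTopic: the callback runs in a transaction and its (value, error) pair is returned as-is. The wrapper name is hypothetical; GetRepoTopicByName is the real helper shown above:

package example // hypothetical caller package

import (
	"context"

	"code.gitea.io/gitea/models/db"
	repo_model "code.gitea.io/gitea/models/repo"
)

func findTopicTx(ctx context.Context, repoID int64, name string) (*repo_model.Topic, error) {
	return db.WithTx2(ctx, func(ctx context.Context) (*repo_model.Topic, error) {
		// A non-nil error here rolls the transaction back and is forwarded,
		// together with the nil *Topic, to the caller.
		return repo_model.GetRepoTopicByName(ctx, repoID, name)
	})
}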
diff --git a/models/repo/topic_test.go b/models/repo/topic_test.go
index 1600896b6e..b6a7aed7b1 100644
--- a/models/repo/topic_test.go
+++ b/models/repo/topic_test.go
@@ -53,7 +53,7 @@ func TestAddTopic(t *testing.T) {
totalNrOfTopics++
topic, err := repo_model.GetTopicByName(db.DefaultContext, "gitea")
assert.NoError(t, err)
- assert.EqualValues(t, 1, topic.RepoCount)
+ assert.Equal(t, 1, topic.RepoCount)
topics, err = db.Find[repo_model.Topic](db.DefaultContext, &repo_model.FindTopicOptions{})
assert.NoError(t, err)
diff --git a/models/repo/transfer.go b/models/repo/transfer.go
index b669145d68..3fb8cb69ab 100644
--- a/models/repo/transfer.go
+++ b/models/repo/transfer.go
@@ -61,7 +61,7 @@ func (err ErrRepoTransferInProgress) Unwrap() error {
}
// RepoTransfer is used to manage repository transfers
-type RepoTransfer struct { //nolint
+type RepoTransfer struct { //nolint:revive // export stutter
ID int64 `xorm:"pk autoincr"`
DoerID int64
Doer *user_model.User `xorm:"-"`
@@ -249,7 +249,7 @@ func CreatePendingRepositoryTransfer(ctx context.Context, doer, newOwner *user_m
}
repo.Status = RepositoryPendingTransfer
- if err := UpdateRepositoryCols(ctx, repo, "status"); err != nil {
+ if err := UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil {
return err
}
diff --git a/models/repo/update.go b/models/repo/update.go
index fce357a1ac..3228ae11a4 100644
--- a/models/repo/update.go
+++ b/models/repo/update.go
@@ -19,19 +19,14 @@ func UpdateRepositoryOwnerNames(ctx context.Context, ownerID int64, ownerName st
if ownerID == 0 {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- if _, err := db.GetEngine(ctx).Where("owner_id = ?", ownerID).Cols("owner_name").Update(&Repository{
+ if _, err := db.GetEngine(ctx).Where("owner_id = ?", ownerID).Cols("owner_name").NoAutoTime().Update(&Repository{
OwnerName: ownerName,
}); err != nil {
return err
}
- return committer.Commit()
+ return nil
}
// UpdateRepositoryUpdatedTime updates a repository's updated time
@@ -40,15 +35,15 @@ func UpdateRepositoryUpdatedTime(ctx context.Context, repoID int64, updateTime t
return err
}
-// UpdateRepositoryCols updates repository's columns
-func UpdateRepositoryCols(ctx context.Context, repo *Repository, cols ...string) error {
- _, err := db.GetEngine(ctx).ID(repo.ID).Cols(cols...).Update(repo)
+// UpdateRepositoryColsWithAutoTime updates repository's columns and the timestamp fields automatically
+func UpdateRepositoryColsWithAutoTime(ctx context.Context, repo *Repository, colName string, moreColNames ...string) error {
+ _, err := db.GetEngine(ctx).ID(repo.ID).Cols(append([]string{colName}, moreColNames...)...).Update(repo)
return err
}
-// UpdateRepositoryColsNoAutoTime updates repository's columns and but applies time change automatically
-func UpdateRepositoryColsNoAutoTime(ctx context.Context, repo *Repository, cols ...string) error {
- _, err := db.GetEngine(ctx).ID(repo.ID).Cols(cols...).NoAutoTime().Update(repo)
+// UpdateRepositoryColsNoAutoTime updates repository's columns but does not change the timestamp fields automatically
+func UpdateRepositoryColsNoAutoTime(ctx context.Context, repo *Repository, colName string, moreColNames ...string) error {
+ _, err := db.GetEngine(ctx).ID(repo.ID).Cols(append([]string{colName}, moreColNames...)...).NoAutoTime().Update(repo)
return err
}
@@ -111,31 +106,31 @@ func (err ErrRepoFilesAlreadyExist) Unwrap() error {
return util.ErrAlreadyExist
}
-// CheckCreateRepository check if could created a repository
-func CheckCreateRepository(ctx context.Context, doer, u *user_model.User, name string, overwriteOrAdopt bool) error {
- if !doer.CanCreateRepo() {
- return ErrReachLimitOfRepo{u.MaxRepoCreation}
+// CheckCreateRepository checks whether the doer can create a repository in the new owner's namespace
+func CheckCreateRepository(ctx context.Context, doer, owner *user_model.User, name string, overwriteOrAdopt bool) error {
+ if !doer.CanCreateRepoIn(owner) {
+ return ErrReachLimitOfRepo{owner.MaxRepoCreation}
}
if err := IsUsableRepoName(name); err != nil {
return err
}
- has, err := IsRepositoryModelOrDirExist(ctx, u, name)
+ has, err := IsRepositoryModelOrDirExist(ctx, owner, name)
if err != nil {
return fmt.Errorf("IsRepositoryExist: %w", err)
} else if has {
- return ErrRepoAlreadyExist{u.Name, name}
+ return ErrRepoAlreadyExist{owner.Name, name}
}
- repoPath := RepoPath(u.Name, name)
+ repoPath := RepoPath(owner.Name, name)
isExist, err := util.IsExist(repoPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
return err
}
if !overwriteOrAdopt && isExist {
- return ErrRepoFilesAlreadyExist{u.Name, name}
+ return ErrRepoFilesAlreadyExist{owner.Name, name}
}
return nil
}
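A hedged sketch of the renamed column-update helpers: both now require at least one explicit column name, and only the with-auto-time variant bumps the timestamp columns. The column names below are examples of real Repository columns; the wrapper functions are hypothetical:

package example // hypothetical caller package

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

func archiveRepo(ctx context.Context, repo *repo_model.Repository) error {
	repo.IsArchived = true
	// With-auto-time variant: updated_unix is bumped alongside is_archived.
	return repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "is_archived")
}

func renameOwner(ctx context.Context, repo *repo_model.Repository, ownerName string) error {
	repo.OwnerName = ownerName
	// No-auto-time variant: owner_name changes without touching the timestamps.
	return repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "owner_name")
}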
diff --git a/models/repo/upload.go b/models/repo/upload.go
index 18834f6b83..f7d4749842 100644
--- a/models/repo/upload.go
+++ b/models/repo/upload.go
@@ -10,7 +10,7 @@ import (
"io"
"mime/multipart"
"os"
- "path"
+ "path/filepath"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
@@ -51,14 +51,10 @@ func init() {
db.RegisterModel(new(Upload))
}
-// UploadLocalPath returns where uploads is stored in local file system based on given UUID.
-func UploadLocalPath(uuid string) string {
- return path.Join(setting.Repository.Upload.TempPath, uuid[0:1], uuid[1:2], uuid)
-}
-
-// LocalPath returns where uploads are temporarily stored in local file system.
+// LocalPath returns where the upload is temporarily stored in the local file system, based on its UUID.
func (upload *Upload) LocalPath() string {
- return UploadLocalPath(upload.UUID)
+ uuid := upload.UUID
+ return setting.AppDataTempDir("repo-uploads").JoinPath(uuid[0:1], uuid[1:2], uuid)
}
// NewUpload creates a new upload object.
@@ -69,7 +65,7 @@ func NewUpload(ctx context.Context, name string, buf []byte, file multipart.File
}
localPath := upload.LocalPath()
- if err = os.MkdirAll(path.Dir(localPath), os.ModePerm); err != nil {
+ if err = os.MkdirAll(filepath.Dir(localPath), os.ModePerm); err != nil {
return nil, fmt.Errorf("MkdirAll: %w", err)
}
@@ -121,24 +117,14 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
ids := make([]int64, len(uploads))
- for i := 0; i < len(uploads); i++ {
+ for i := range uploads {
ids[i] = uploads[i].ID
}
if err = db.DeleteByIDs[Upload](ctx, ids...); err != nil {
return fmt.Errorf("delete uploads: %w", err)
}
- if err = committer.Commit(); err != nil {
- return err
- }
-
for _, upload := range uploads {
localPath := upload.LocalPath()
isFile, err := util.IsFile(localPath)
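An illustration of the two-level fan-out used by Upload.LocalPath: the first two characters of the UUID become nested directories so no single directory collects every upload. Plain filepath only; the base directory stands in for whatever setting.AppDataTempDir("repo-uploads") resolves to:

package main

import (
	"fmt"
	"path/filepath"
)

func shardedPath(base, uuid string) string {
	// Mirrors the uuid[0:1]/uuid[1:2]/uuid layout used in LocalPath.
	return filepath.Join(base, uuid[0:1], uuid[1:2], uuid)
}

func main() {
	fmt.Println(shardedPath("/data/tmp/repo-uploads", "a1b2c3d4"))
	// Output: /data/tmp/repo-uploads/a/1/a1b2c3d4
}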
diff --git a/models/repo/watch_test.go b/models/repo/watch_test.go
index c39ef607e8..7ed72386c9 100644
--- a/models/repo/watch_test.go
+++ b/models/repo/watch_test.go
@@ -36,7 +36,7 @@ func TestGetWatchers(t *testing.T) {
// One watcher is inactive, hence the minus 1
assert.Len(t, watches, repo.NumWatches-1)
for _, watch := range watches {
- assert.EqualValues(t, repo.ID, watch.RepoID)
+ assert.Equal(t, repo.ID, watch.RepoID)
}
watches, err = repo_model.GetWatchers(db.DefaultContext, unittest.NonexistentID)
diff --git a/models/repo/wiki.go b/models/repo/wiki.go
index 4239a815b2..832e15ae0d 100644
--- a/models/repo/wiki.go
+++ b/models/repo/wiki.go
@@ -46,7 +46,7 @@ func IsErrWikiReservedName(err error) bool {
}
func (err ErrWikiReservedName) Error() string {
- return fmt.Sprintf("wiki title is reserved: %s", err.Title)
+ return "wiki title is reserved: " + err.Title
}
func (err ErrWikiReservedName) Unwrap() error {
@@ -65,7 +65,7 @@ func IsErrWikiInvalidFileName(err error) bool {
}
func (err ErrWikiInvalidFileName) Error() string {
- return fmt.Sprintf("Invalid wiki filename: %s", err.FileName)
+ return "Invalid wiki filename: " + err.FileName
}
func (err ErrWikiInvalidFileName) Unwrap() error {
diff --git a/models/repo_test.go b/models/repo_test.go
index bcf62237f0..b6c53fd197 100644
--- a/models/repo_test.go
+++ b/models/repo_test.go
@@ -29,10 +29,10 @@ func Test_repoStatsCorrectIssueNumComments(t *testing.T) {
issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
assert.NotNil(t, issue2)
- assert.EqualValues(t, 0, issue2.NumComments) // the fixture data is wrong, but we don't fix it here
+ assert.Equal(t, 0, issue2.NumComments) // the fixture data is wrong, but we don't fix it here
assert.NoError(t, repoStatsCorrectIssueNumComments(db.DefaultContext, 2))
// reload the issue
issue2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
- assert.EqualValues(t, 1, issue2.NumComments)
+ assert.Equal(t, 1, issue2.NumComments)
}
diff --git a/models/secret/secret.go b/models/secret/secret.go
index eab9cf0712..10a0287dfd 100644
--- a/models/secret/secret.go
+++ b/models/secret/secret.go
@@ -40,9 +40,15 @@ type Secret struct {
RepoID int64 `xorm:"INDEX UNIQUE(owner_repo_name) NOT NULL DEFAULT 0"`
Name string `xorm:"UNIQUE(owner_repo_name) NOT NULL"`
Data string `xorm:"LONGTEXT"` // encrypted data
+ Description string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
}
+const (
+ SecretDataMaxLength = 65536
+ SecretDescriptionMaxLength = 4096
+)
+
// ErrSecretNotFound represents a "secret not found" error.
type ErrSecretNotFound struct {
Name string
@@ -57,7 +63,7 @@ func (err ErrSecretNotFound) Unwrap() error {
}
// InsertEncryptedSecret creates, encrypts, and validates a new secret from the still-unencrypted data and inserts it into the database
-func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, data string) (*Secret, error) {
+func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, data, description string) (*Secret, error) {
if ownerID != 0 && repoID != 0 {
// It's trying to create a secret that belongs to a repository, but OwnerID has been set accidentally.
// Remove OwnerID to avoid confusion; it's not worth returning an error here.
@@ -67,15 +73,23 @@ func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, dat
return nil, fmt.Errorf("%w: ownerID and repoID cannot be both zero, global secrets are not supported", util.ErrInvalidArgument)
}
+ if len(data) > SecretDataMaxLength {
+ return nil, util.NewInvalidArgumentErrorf("data too long")
+ }
+
+ description = util.TruncateRunes(description, SecretDescriptionMaxLength)
+
encrypted, err := secret_module.EncryptSecret(setting.SecretKey, data)
if err != nil {
return nil, err
}
+
secret := &Secret{
- OwnerID: ownerID,
- RepoID: repoID,
- Name: strings.ToUpper(name),
- Data: encrypted,
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: strings.ToUpper(name),
+ Data: encrypted,
+ Description: description,
}
return secret, db.Insert(ctx, secret)
}
@@ -114,16 +128,23 @@ func (opts FindSecretsOptions) ToConds() builder.Cond {
}
// UpdateSecret changes an org or user repo secret.
-func UpdateSecret(ctx context.Context, secretID int64, data string) error {
+func UpdateSecret(ctx context.Context, secretID int64, data, description string) error {
+ if len(data) > SecretDataMaxLength {
+ return util.NewInvalidArgumentErrorf("data too long")
+ }
+
+ description = util.TruncateRunes(description, SecretDescriptionMaxLength)
+
encrypted, err := secret_module.EncryptSecret(setting.SecretKey, data)
if err != nil {
return err
}
s := &Secret{
- Data: encrypted,
+ Data: encrypted,
+ Description: description,
}
- affected, err := db.GetEngine(ctx).ID(secretID).Cols("data").Update(s)
+ affected, err := db.GetEngine(ctx).ID(secretID).Cols("data", "description").Update(s)
if affected != 1 {
return ErrSecretNotFound{}
}
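A minimal sketch of the extended secret API: both helpers now take a description, data longer than SecretDataMaxLength is rejected, and the description is truncated to SecretDescriptionMaxLength runes. The wrapper, secret name, and description text are made up:

package example // hypothetical caller package

import (
	"context"

	secret_model "code.gitea.io/gitea/models/secret"
)

func upsertRepoSecret(ctx context.Context, repoID, secretID int64, value string) error {
	if secretID == 0 {
		// ownerID 0 + repoID set: a repository-level secret.
		_, err := secret_model.InsertEncryptedSecret(ctx, 0, repoID, "DEPLOY_TOKEN", value, "used by the deploy workflow")
		return err
	}
	return secret_model.UpdateSecret(ctx, secretID, value, "used by the deploy workflow")
}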
diff --git a/models/system/notice.go b/models/system/notice.go
index e7ec6a9693..91bf4be0f6 100644
--- a/models/system/notice.go
+++ b/models/system/notice.go
@@ -29,7 +29,7 @@ const (
type Notice struct {
ID int64 `xorm:"pk autoincr"`
Type NoticeType
- Description string `xorm:"TEXT"`
+ Description string `xorm:"LONGTEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}
diff --git a/models/system/setting_test.go b/models/system/setting_test.go
index 8f04412fb4..7e7e0c8fca 100644
--- a/models/system/setting_test.go
+++ b/models/system/setting_test.go
@@ -21,24 +21,24 @@ func TestSettings(t *testing.T) {
rev, settings, err := system.GetAllSettings(db.DefaultContext)
assert.NoError(t, err)
- assert.EqualValues(t, 1, rev)
+ assert.Equal(t, 1, rev)
assert.Len(t, settings, 1) // there is only one "revision" key
err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "true"})
assert.NoError(t, err)
rev, settings, err = system.GetAllSettings(db.DefaultContext)
assert.NoError(t, err)
- assert.EqualValues(t, 2, rev)
+ assert.Equal(t, 2, rev)
assert.Len(t, settings, 2)
- assert.EqualValues(t, "true", settings[keyName])
+ assert.Equal(t, "true", settings[keyName])
err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "false"})
assert.NoError(t, err)
rev, settings, err = system.GetAllSettings(db.DefaultContext)
assert.NoError(t, err)
- assert.EqualValues(t, 3, rev)
+ assert.Equal(t, 3, rev)
assert.Len(t, settings, 2)
- assert.EqualValues(t, "false", settings[keyName])
+ assert.Equal(t, "false", settings[keyName])
// setting the same value should not trigger DuplicateKey error, and the "version" should be increased
err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "false"})
@@ -47,5 +47,5 @@ func TestSettings(t *testing.T) {
rev, settings, err = system.GetAllSettings(db.DefaultContext)
assert.NoError(t, err)
assert.Len(t, settings, 2)
- assert.EqualValues(t, 4, rev)
+ assert.Equal(t, 4, rev)
}
diff --git a/models/unit/unit.go b/models/unit/unit.go
index c816fc6c68..c0560678ca 100644
--- a/models/unit/unit.go
+++ b/models/unit/unit.go
@@ -6,6 +6,7 @@ package unit
import (
"errors"
"fmt"
+ "slices"
"strings"
"sync/atomic"
@@ -20,17 +21,21 @@ type Type int
// Enumerate all the unit types
const (
- TypeInvalid Type = iota // 0 invalid
- TypeCode // 1 code
- TypeIssues // 2 issues
- TypePullRequests // 3 PRs
- TypeReleases // 4 Releases
- TypeWiki // 5 Wiki
- TypeExternalWiki // 6 ExternalWiki
- TypeExternalTracker // 7 ExternalTracker
- TypeProjects // 8 Projects
- TypePackages // 9 Packages
- TypeActions // 10 Actions
+ TypeInvalid Type = iota // 0 invalid
+
+ TypeCode // 1 code
+ TypeIssues // 2 issues
+ TypePullRequests // 3 PRs
+ TypeReleases // 4 Releases
+ TypeWiki // 5 Wiki
+ TypeExternalWiki // 6 ExternalWiki
+ TypeExternalTracker // 7 ExternalTracker
+ TypeProjects // 8 Projects
+ TypePackages // 9 Packages
+ TypeActions // 10 Actions
+
+ // FIXME: TEAM-UNIT-PERMISSION: the team unit "admin" permission's design is not right. When a new unit is added in the future,
+ // the admin team won't inherit the correct admin permission for the new unit; a complete fix is needed before adding any new unit.
)
// Value returns integer value for unit type (used by template)
@@ -200,22 +205,12 @@ func LoadUnitConfig() error {
// UnitGlobalDisabled checks whether the unit type is globally disabled
func (u Type) UnitGlobalDisabled() bool {
- for _, ud := range DisabledRepoUnitsGet() {
- if u == ud {
- return true
- }
- }
- return false
+ return slices.Contains(DisabledRepoUnitsGet(), u)
}
// CanBeDefault checks if the unit type can be a default repo unit
func (u *Type) CanBeDefault() bool {
- for _, nadU := range NotAllowedDefaultRepoUnits {
- if *u == nadU {
- return false
- }
- }
- return true
+ return !slices.Contains(NotAllowedDefaultRepoUnits, *u)
}
// Unit is a section of one repository
@@ -380,20 +375,3 @@ func AllUnitKeyNames() []string {
}
return res
}
-
-// MinUnitAccessMode returns the minial permission of the permission map
-func MinUnitAccessMode(unitsMap map[Type]perm.AccessMode) perm.AccessMode {
- res := perm.AccessModeNone
- for t, mode := range unitsMap {
- // Don't allow `TypeExternal{Tracker,Wiki}` to influence this as they can only be set to READ perms.
- if t == TypeExternalTracker || t == TypeExternalWiki {
- continue
- }
-
- // get the minial permission great than AccessModeNone except all are AccessModeNone
- if mode > perm.AccessModeNone && (res == perm.AccessModeNone || mode < res) {
- res = mode
- }
- }
- return res
-}
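
The two hand-written membership loops are replaced with the standard library's generic slices.Contains (Go 1.21+); the same idiom in isolation:

package main

import (
	"fmt"
	"slices"
)

func main() {
	disabled := []int{6, 7} // e.g. disabled unit type values
	fmt.Println(slices.Contains(disabled, 7)) // true: 7 is disabled
	fmt.Println(slices.Contains(disabled, 1)) // false: 1 is allowed
}
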
diff --git a/models/unittest/consistency.go b/models/unittest/consistency.go
index 71839001be..364afb5c52 100644
--- a/models/unittest/consistency.go
+++ b/models/unittest/consistency.go
@@ -11,6 +11,7 @@ import (
"code.gitea.io/gitea/models/db"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
"xorm.io/builder"
)
@@ -24,7 +25,7 @@ const (
var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean any))
// CheckConsistencyFor tests that all matching database entries are consistent
-func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) {
+func CheckConsistencyFor(t require.TestingT, beansToCheck ...any) {
for _, bean := range beansToCheck {
sliceType := reflect.SliceOf(reflect.TypeOf(bean))
sliceValue := reflect.MakeSlice(sliceType, 0, 10)
@@ -42,13 +43,11 @@ func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) {
}
}
-func checkForConsistency(t assert.TestingT, bean any) {
+func checkForConsistency(t require.TestingT, bean any) {
tb, err := db.TableInfo(bean)
assert.NoError(t, err)
f := consistencyCheckMap[tb.Name]
- if f == nil {
- assert.FailNow(t, "unknown bean type: %#v", bean)
- }
+ require.NotNil(t, f, "unknown bean type: %#v", bean)
f(t, bean)
}
@@ -71,8 +70,8 @@ func init() {
AssertCountByCond(t, "follow", builder.Eq{"user_id": user.int("ID")}, user.int("NumFollowing"))
AssertCountByCond(t, "follow", builder.Eq{"follow_id": user.int("ID")}, user.int("NumFollowers"))
if user.int("Type") != modelsUserTypeOrganization {
- assert.EqualValues(t, 0, user.int("NumMembers"), "Unexpected number of members for user id: %d", user.int("ID"))
- assert.EqualValues(t, 0, user.int("NumTeams"), "Unexpected number of teams for user id: %d", user.int("ID"))
+ assert.Equal(t, 0, user.int("NumMembers"), "Unexpected number of members for user id: %d", user.int("ID"))
+ assert.Equal(t, 0, user.int("NumTeams"), "Unexpected number of teams for user id: %d", user.int("ID"))
}
}
@@ -119,7 +118,7 @@ func init() {
assert.EqualValues(t, issue.int("NumComments"), actual, "Unexpected number of comments for issue id: %d", issue.int("ID"))
if issue.bool("IsPull") {
prRow := AssertExistsAndLoadMap(t, "pull_request", builder.Eq{"issue_id": issue.int("ID")})
- assert.EqualValues(t, parseInt(prRow["index"]), issue.int("Index"), "Unexpected index for issue id: %d", issue.int("ID"))
+ assert.Equal(t, parseInt(prRow["index"]), issue.int("Index"), "Unexpected index for issue id: %d", issue.int("ID"))
}
}
@@ -127,7 +126,7 @@ func init() {
pr := reflectionWrap(bean)
issueRow := AssertExistsAndLoadMap(t, "issue", builder.Eq{"id": pr.int("IssueID")})
assert.True(t, parseBool(issueRow["is_pull"]))
- assert.EqualValues(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID"))
+ assert.Equal(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID"))
}
checkForMilestoneConsistency := func(t assert.TestingT, bean any) {
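
Switching the checker to require.TestingT matters because testify's require aborts the current test on failure while assert only records it and continues; a hypothetical helper showing the same guard-before-call pattern:

package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// mustLookup mirrors the pattern above: require.NotNil stops the test
// immediately when the map has no entry, so the returned function is
// never invoked while nil.
func mustLookup(t *testing.T, checks map[string]func(), name string) func() {
	f := checks[name]
	require.NotNil(t, f, "unknown check: %q", name)
	return f
}
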
diff --git a/models/unittest/fixtures_loader.go b/models/unittest/fixtures_loader.go
index 14686caf63..0560da8349 100644
--- a/models/unittest/fixtures_loader.go
+++ b/models/unittest/fixtures_loader.go
@@ -12,13 +12,17 @@ import (
"slices"
"strings"
+ "code.gitea.io/gitea/models/db"
+
"gopkg.in/yaml.v3"
"xorm.io/xorm"
"xorm.io/xorm/schemas"
)
-type fixtureItem struct {
- tableName string
+type FixtureItem struct {
+ fileFullPath string
+ tableName string
+
tableNameQuoted string
sqlInserts []string
sqlInsertArgs [][]any
@@ -27,10 +31,11 @@ type fixtureItem struct {
}
type fixturesLoaderInternal struct {
+ xormEngine *xorm.Engine
+ xormTableNames map[string]bool
db *sql.DB
dbType schemas.DBType
- files []string
- fixtures map[string]*fixtureItem
+ fixtures map[string]*FixtureItem
quoteObject func(string) string
paramPlaceholder func(idx int) string
}
@@ -59,29 +64,27 @@ func (f *fixturesLoaderInternal) preprocessFixtureRow(row []map[string]any) (err
return nil
}
-func (f *fixturesLoaderInternal) prepareFixtureItem(file string) (_ *fixtureItem, err error) {
- fixture := &fixtureItem{}
- fixture.tableName, _, _ = strings.Cut(filepath.Base(file), ".")
+func (f *fixturesLoaderInternal) prepareFixtureItem(fixture *FixtureItem) (err error) {
fixture.tableNameQuoted = f.quoteObject(fixture.tableName)
if f.dbType == schemas.MSSQL {
fixture.mssqlHasIdentityColumn, err = f.mssqlTableHasIdentityColumn(f.db, fixture.tableName)
if err != nil {
- return nil, err
+ return err
}
}
- data, err := os.ReadFile(file)
+ data, err := os.ReadFile(fixture.fileFullPath)
if err != nil {
- return nil, fmt.Errorf("failed to read file %q: %w", file, err)
+ return fmt.Errorf("failed to read file %q: %w", fixture.fileFullPath, err)
}
var rows []map[string]any
if err = yaml.Unmarshal(data, &rows); err != nil {
- return nil, fmt.Errorf("failed to unmarshal yaml data from %q: %w", file, err)
+ return fmt.Errorf("failed to unmarshal yaml data from %q: %w", fixture.fileFullPath, err)
}
if err = f.preprocessFixtureRow(rows); err != nil {
- return nil, fmt.Errorf("failed to preprocess fixture rows from %q: %w", file, err)
+ return fmt.Errorf("failed to preprocess fixture rows from %q: %w", fixture.fileFullPath, err)
}
var sqlBuf []byte
@@ -107,19 +110,17 @@ func (f *fixturesLoaderInternal) prepareFixtureItem(file string) (_ *fixtureItem
sqlBuf = sqlBuf[:0]
sqlArguments = sqlArguments[:0]
}
- return fixture, nil
+ return nil
}
-func (f *fixturesLoaderInternal) loadFixtures(tx *sql.Tx, file string) (err error) {
- fixture := f.fixtures[file]
- if fixture == nil {
- if fixture, err = f.prepareFixtureItem(file); err != nil {
+func (f *fixturesLoaderInternal) loadFixtures(tx *sql.Tx, fixture *FixtureItem) (err error) {
+ if fixture.tableNameQuoted == "" {
+ if err = f.prepareFixtureItem(fixture); err != nil {
return err
}
- f.fixtures[file] = fixture
}
- _, err = tx.Exec(fmt.Sprintf("DELETE FROM %s", fixture.tableNameQuoted)) // sqlite3 doesn't support truncate
+ _, err = tx.Exec("DELETE FROM " + fixture.tableNameQuoted) // sqlite3 doesn't support truncate
if err != nil {
return err
}
@@ -147,15 +148,26 @@ func (f *fixturesLoaderInternal) Load() error {
}
defer func() { _ = tx.Rollback() }()
- for _, file := range f.files {
- if err := f.loadFixtures(tx, file); err != nil {
- return fmt.Errorf("failed to load fixtures from %s: %w", file, err)
+ for _, fixture := range f.fixtures {
+ if !f.xormTableNames[fixture.tableName] {
+ continue
+ }
+ if err := f.loadFixtures(tx, fixture); err != nil {
+ return fmt.Errorf("failed to load fixtures from %s: %w", fixture.fileFullPath, err)
}
}
- return tx.Commit()
+ if err = tx.Commit(); err != nil {
+ return err
+ }
+ for xormTableName := range f.xormTableNames {
+ if f.fixtures[xormTableName] == nil {
+ _, _ = f.xormEngine.Exec("DELETE FROM `" + xormTableName + "`")
+ }
+ }
+ return nil
}
-func FixturesFileFullPaths(dir string, files []string) ([]string, error) {
+func FixturesFileFullPaths(dir string, files []string) (map[string]*FixtureItem, error) {
if files != nil && len(files) == 0 {
return nil, nil // load nothing
}
@@ -169,20 +181,25 @@ func FixturesFileFullPaths(dir string, files []string) ([]string, error) {
files = append(files, e.Name())
}
}
- for i, file := range files {
- if !filepath.IsAbs(file) {
- files[i] = filepath.Join(dir, file)
+ fixtureItems := map[string]*FixtureItem{}
+ for _, file := range files {
+ fileFullPath := file
+ if !filepath.IsAbs(fileFullPath) {
+ fileFullPath = filepath.Join(dir, file)
}
+ tableName, _, _ := strings.Cut(filepath.Base(file), ".")
+ fixtureItems[tableName] = &FixtureItem{fileFullPath: fileFullPath, tableName: tableName}
}
- return files, nil
+ return fixtureItems, nil
}
func NewFixturesLoader(x *xorm.Engine, opts FixturesOptions) (FixturesLoader, error) {
- files, err := FixturesFileFullPaths(opts.Dir, opts.Files)
+ fixtureItems, err := FixturesFileFullPaths(opts.Dir, opts.Files)
if err != nil {
return nil, fmt.Errorf("failed to get fixtures files: %w", err)
}
- f := &fixturesLoaderInternal{db: x.DB().DB, dbType: x.Dialect().URI().DBType, files: files, fixtures: map[string]*fixtureItem{}}
+
+ f := &fixturesLoaderInternal{xormEngine: x, db: x.DB().DB, dbType: x.Dialect().URI().DBType, fixtures: fixtureItems}
switch f.dbType {
case schemas.SQLITE:
f.quoteObject = func(s string) string { return fmt.Sprintf(`"%s"`, s) }
@@ -197,5 +214,12 @@ func NewFixturesLoader(x *xorm.Engine, opts FixturesOptions) (FixturesLoader, er
f.quoteObject = func(s string) string { return fmt.Sprintf("[%s]", s) }
f.paramPlaceholder = func(idx int) string { return "?" }
}
+
+ xormBeans, _ := db.NamesToBean()
+ f.xormTableNames = map[string]bool{}
+ for _, bean := range xormBeans {
+ f.xormTableNames[db.TableName(bean)] = true
+ }
+
return f, nil
}
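
A hedged sketch of driving the reworked loader, assuming the exported FixturesLoader interface exposes the Load method used internally above; the helper and its arguments are illustrative:

package example

import (
	"path/filepath"

	"code.gitea.io/gitea/models/unittest"
	"xorm.io/xorm"
)

// reloadFixtures illustrates the new behaviour: fixtures are keyed by table
// name, only files whose table is known to the xorm engine are loaded, and
// known tables without a fixture file are emptied.
func reloadFixtures(x *xorm.Engine, giteaRoot string) error {
	loader, err := unittest.NewFixturesLoader(x, unittest.FixturesOptions{
		Dir: filepath.Join(giteaRoot, "models", "fixtures"),
	})
	if err != nil {
		return err
	}
	return loader.Load()
}
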
diff --git a/models/unittest/fscopy.go b/models/unittest/fscopy.go
index b7ba6b7ef5..98b01815bd 100644
--- a/models/unittest/fscopy.go
+++ b/models/unittest/fscopy.go
@@ -28,7 +28,7 @@ func SyncFile(srcPath, destPath string) error {
}
if src.Size() == dest.Size() &&
- src.ModTime() == dest.ModTime() &&
+ src.ModTime().Equal(dest.ModTime()) &&
src.Mode() == dest.Mode() {
return nil
}
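
The switch from == to Equal is the usual time.Time pitfall: == compares the full struct (including the monotonic reading and location), while Equal compares only the instant. A minimal illustration:

package main

import (
	"fmt"
	"time"
)

func main() {
	t1 := time.Now()
	t2 := t1.Round(0).UTC() // same instant, monotonic reading stripped, location changed
	fmt.Println(t1 == t2)     // false: the internal representations differ
	fmt.Println(t1.Equal(t2)) // true: they denote the same instant
}
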
diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go
index 7a9ca9698d..cb60cf5f85 100644
--- a/models/unittest/testdb.go
+++ b/models/unittest/testdb.go
@@ -20,6 +20,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/setting/config"
"code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/tempdir"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/util"
@@ -35,8 +36,8 @@ func fatalTestError(fmtStr string, args ...any) {
os.Exit(1)
}
-// InitSettings initializes config provider and load common settings for tests
-func InitSettings() {
+// InitSettingsForTesting initializes the config provider and loads common settings for tests
+func InitSettingsForTesting() {
setting.IsInTesting = true
log.OsExiter = func(code int) {
if code != 0 {
@@ -75,7 +76,7 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
testOpts := util.OptionalArg(testOptsArg, &TestOptions{})
giteaRoot = test.SetupGiteaRoot()
setting.CustomPath = filepath.Join(giteaRoot, "custom")
- InitSettings()
+ InitSettingsForTesting()
fixturesOpts := FixturesOptions{Dir: filepath.Join(giteaRoot, "models", "fixtures"), Files: testOpts.FixtureFiles}
if err := CreateTestEngine(fixturesOpts); err != nil {
@@ -92,15 +93,19 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
setting.SSH.Domain = "try.gitea.io"
setting.Database.Type = "sqlite3"
setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master"
- repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos")
+ repoRootPath, cleanup1, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("repos")
if err != nil {
fatalTestError("TempDir: %v\n", err)
}
+ defer cleanup1()
+
setting.RepoRootPath = repoRootPath
- appDataPath, err := os.MkdirTemp(os.TempDir(), "appdata")
+ appDataPath, cleanup2, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("appdata")
if err != nil {
fatalTestError("TempDir: %v\n", err)
}
+ defer cleanup2()
+
setting.AppDataPath = appDataPath
setting.AppWorkPath = giteaRoot
setting.StaticRootPath = giteaRoot
@@ -153,13 +158,6 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
fatalTestError("tear down failed: %v\n", err)
}
}
-
- if err = util.RemoveAll(repoRootPath); err != nil {
- fatalTestError("util.RemoveAll: %v\n", err)
- }
- if err = util.RemoveAll(appDataPath); err != nil {
- fatalTestError("util.RemoveAll: %v\n", err)
- }
os.Exit(exitStatus)
}
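
The temporary directories now come from the internal tempdir module, which returns a cleanup function instead of requiring the manual util.RemoveAll calls removed above; a minimal sketch assuming the API matches those call sites:

package example

import (
	"code.gitea.io/gitea/modules/tempdir"
)

// withTempRepoRoot is a hypothetical helper: MkdirTempRandom returns the
// created directory plus a cleanup func that removes it when deferred.
func withTempRepoRoot(run func(repoRoot string) error) error {
	repoRoot, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("repos")
	if err != nil {
		return err
	}
	defer cleanup()
	return run(repoRoot)
}
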
diff --git a/models/unittest/unit_tests.go b/models/unittest/unit_tests.go
index 1c5595aef8..4a4cec40ae 100644
--- a/models/unittest/unit_tests.go
+++ b/models/unittest/unit_tests.go
@@ -153,9 +153,9 @@ func DumpQueryResult(t require.TestingT, sqlOrBean any, sqlArgs ...any) {
goDB := x.DB().DB
sql, ok := sqlOrBean.(string)
if !ok {
- sql = fmt.Sprintf("SELECT * FROM %s", db.TableName(sqlOrBean))
+ sql = "SELECT * FROM " + db.TableName(sqlOrBean)
} else if !strings.Contains(sql, " ") {
- sql = fmt.Sprintf("SELECT * FROM %s", sql)
+ sql = "SELECT * FROM " + sql
}
rows, err := goDB.Query(sql, sqlArgs...)
require.NoError(t, err)
diff --git a/models/user/avatar.go b/models/user/avatar.go
index 2a41b99129..542bd93b98 100644
--- a/models/user/avatar.go
+++ b/models/user/avatar.go
@@ -5,7 +5,6 @@ package user
import (
"context"
- "crypto/md5"
"fmt"
"image/png"
"io"
@@ -61,7 +60,9 @@ func GenerateRandomAvatar(ctx context.Context, u *User) error {
// AvatarLinkWithSize returns a link to the user's avatar with size. size <= 0 means default size
func (u *User) AvatarLinkWithSize(ctx context.Context, size int) string {
- if u.IsGhost() || u.IsGiteaActions() {
+ // The ghost user was deleted, Gitea Actions is a bot user, and an ID <= 0 means a virtual user
+ // derived from git commit information
+ if u.IsGhost() || u.IsGiteaActions() || u.ID <= 0 {
return avatars.DefaultAvatarLink()
}
@@ -104,7 +105,7 @@ func (u *User) IsUploadAvatarChanged(data []byte) bool {
if !u.UseCustomAvatar || len(u.Avatar) == 0 {
return true
}
- avatarID := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", u.ID, md5.Sum(data)))))
+ avatarID := avatar.HashAvatar(u.ID, data)
return u.Avatar != avatarID
}
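
The inline MD5 construction moves into avatar.HashAvatar; judging only from the removed line, the centralized helper presumably computes something equivalent to the hypothetical reimplementation below (not the module's actual code):

package example

import (
	"crypto/md5"
	"fmt"
)

// hashAvatar mirrors the formula visible in the removed line:
// hex(md5("<userID>-<hex(md5(data))>")).
func hashAvatar(userID int64, data []byte) string {
	inner := md5.Sum(data)
	return fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", userID, inner))))
}
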
diff --git a/models/user/badge.go b/models/user/badge.go
index 3ff3530a36..e475ceb748 100644
--- a/models/user/badge.go
+++ b/models/user/badge.go
@@ -19,7 +19,7 @@ type Badge struct {
}
// UserBadge represents a user badge
-type UserBadge struct { //nolint:revive
+type UserBadge struct { //nolint:revive // export stutter
ID int64 `xorm:"pk autoincr"`
BadgeID int64
UserID int64 `xorm:"INDEX"`
diff --git a/models/user/email_address.go b/models/user/email_address.go
index 2ba6a56450..cb423945f8 100644
--- a/models/user/email_address.go
+++ b/models/user/email_address.go
@@ -256,15 +256,9 @@ func IsEmailUsed(ctx context.Context, email string) (bool, error) {
// ActivateEmail activates the email address for the given user.
func ActivateEmail(ctx context.Context, email *EmailAddress) error {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- if err := updateActivation(ctx, email, true); err != nil {
- return err
- }
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ return updateActivation(ctx, email, true)
+ })
}
func updateActivation(ctx context.Context, email *EmailAddress, activate bool) error {
@@ -305,33 +299,30 @@ func makeEmailPrimaryInternal(ctx context.Context, emailID int64, isActive bool)
return ErrUserNotExist{UID: email.UID}
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
- sess := db.GetEngine(ctx)
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ sess := db.GetEngine(ctx)
- // 1. Update user table
- user.Email = email.Email
- if _, err = sess.ID(user.ID).Cols("email").Update(user); err != nil {
- return err
- }
+ // 1. Update user table
+ user.Email = email.Email
+ if _, err := sess.ID(user.ID).Cols("email").Update(user); err != nil {
+ return err
+ }
- // 2. Update old primary email
- if _, err = sess.Where("uid=? AND is_primary=?", email.UID, true).Cols("is_primary").Update(&EmailAddress{
- IsPrimary: false,
- }); err != nil {
- return err
- }
+ // 2. Update old primary email
+ if _, err := sess.Where("uid=? AND is_primary=?", email.UID, true).Cols("is_primary").Update(&EmailAddress{
+ IsPrimary: false,
+ }); err != nil {
+ return err
+ }
- // 3. update new primary email
- email.IsPrimary = true
- if _, err = sess.ID(email.ID).Cols("is_primary").Update(email); err != nil {
- return err
- }
+ // 3. update new primary email
+ email.IsPrimary = true
+ if _, err := sess.ID(email.ID).Cols("is_primary").Update(email); err != nil {
+ return err
+ }
- return committer.Commit()
+ return nil
+ })
}
// ChangeInactivePrimaryEmail replaces the inactive primary email of a given user
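
This hunk, follow.go, and webhook.go below all replace the manual TxContext/committer boilerplate with db.WithTx, which commits when the callback returns nil and rolls the transaction back on error. A generic sketch of the pattern against a purely hypothetical table:

package example

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

// renameThing illustrates the db.WithTx pattern: the closure runs inside a
// single transaction; returning nil commits it, returning an error rolls it back.
// The `thing` table is illustrative only.
func renameThing(ctx context.Context, id int64, name string) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		_, err := db.Exec(ctx, "UPDATE `thing` SET name = ? WHERE id = ?", name, id)
		return err
	})
}
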
diff --git a/models/user/email_address_test.go b/models/user/email_address_test.go
index d72d873de2..c0666246b0 100644
--- a/models/user/email_address_test.go
+++ b/models/user/email_address_test.go
@@ -4,6 +4,7 @@
package user_test
import (
+ "slices"
"testing"
"code.gitea.io/gitea/models/db"
@@ -100,12 +101,7 @@ func TestListEmails(t *testing.T) {
assert.Greater(t, count, int64(5))
contains := func(match func(s *user_model.SearchEmailResult) bool) bool {
- for _, v := range emails {
- if match(v) {
- return true
- }
- }
- return false
+ return slices.ContainsFunc(emails, match)
}
assert.True(t, contains(func(s *user_model.SearchEmailResult) bool { return s.UID == 18 }))
@@ -205,7 +201,7 @@ func TestEmailAddressValidate(t *testing.T) {
}
for kase, err := range kases {
t.Run(kase, func(t *testing.T) {
- assert.EqualValues(t, err, user_model.ValidateEmail(kase))
+ assert.Equal(t, err, user_model.ValidateEmail(kase))
})
}
}
diff --git a/models/user/follow.go b/models/user/follow.go
index cf9672109a..e098caab5b 100644
--- a/models/user/follow.go
+++ b/models/user/follow.go
@@ -38,24 +38,20 @@ func FollowUser(ctx context.Context, user, follow *User) (err error) {
return ErrBlockedUser
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if err = db.Insert(ctx, &Follow{UserID: user.ID, FollowID: follow.ID}); err != nil {
- return err
- }
-
- if _, err = db.Exec(ctx, "UPDATE `user` SET num_followers = num_followers + 1 WHERE id = ?", follow.ID); err != nil {
- return err
- }
-
- if _, err = db.Exec(ctx, "UPDATE `user` SET num_following = num_following + 1 WHERE id = ?", user.ID); err != nil {
- return err
- }
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err = db.Insert(ctx, &Follow{UserID: user.ID, FollowID: follow.ID}); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(ctx, "UPDATE `user` SET num_followers = num_followers + 1 WHERE id = ?", follow.ID); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(ctx, "UPDATE `user` SET num_following = num_following + 1 WHERE id = ?", user.ID); err != nil {
+ return err
+ }
+ return nil
+ })
}
// UnfollowUser unmarks someone as another's follower.
@@ -64,22 +60,18 @@ func UnfollowUser(ctx context.Context, userID, followID int64) (err error) {
return nil
}
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if _, err = db.DeleteByBean(ctx, &Follow{UserID: userID, FollowID: followID}); err != nil {
+ return err
+ }
- if _, err = db.DeleteByBean(ctx, &Follow{UserID: userID, FollowID: followID}); err != nil {
- return err
- }
+ if _, err = db.Exec(ctx, "UPDATE `user` SET num_followers = num_followers - 1 WHERE id = ?", followID); err != nil {
+ return err
+ }
- if _, err = db.Exec(ctx, "UPDATE `user` SET num_followers = num_followers - 1 WHERE id = ?", followID); err != nil {
- return err
- }
-
- if _, err = db.Exec(ctx, "UPDATE `user` SET num_following = num_following - 1 WHERE id = ?", userID); err != nil {
- return err
- }
- return committer.Commit()
+ if _, err = db.Exec(ctx, "UPDATE `user` SET num_following = num_following - 1 WHERE id = ?", userID); err != nil {
+ return err
+ }
+ return nil
+ })
}
diff --git a/models/user/search.go b/models/user/search.go
index 85915f4020..cfd0d011bc 100644
--- a/models/user/search.go
+++ b/models/user/search.go
@@ -45,13 +45,14 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess
var cond builder.Cond
cond = builder.Eq{"type": opts.Type}
if opts.IncludeReserved {
- if opts.Type == UserTypeIndividual {
+ switch opts.Type {
+ case UserTypeIndividual:
cond = cond.Or(builder.Eq{"type": UserTypeUserReserved}).Or(
builder.Eq{"type": UserTypeBot},
).Or(
builder.Eq{"type": UserTypeRemoteUser},
)
- } else if opts.Type == UserTypeOrganization {
+ case UserTypeOrganization:
cond = cond.Or(builder.Eq{"type": UserTypeOrganizationReserved})
}
}
@@ -136,7 +137,7 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess
// SearchUsers takes options i.e. keyword and part of user name to search,
// it returns results in given range and number of total results.
-func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _ int64, _ error) {
+func SearchUsers(ctx context.Context, opts SearchUserOptions) (users []*User, _ int64, _ error) {
sessCount := opts.toSearchQueryBase(ctx)
defer sessCount.Close()
count, err := sessCount.Count(new(User))
@@ -151,7 +152,7 @@ func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _
sessQuery := opts.toSearchQueryBase(ctx).OrderBy(opts.OrderBy.String())
defer sessQuery.Close()
if opts.Page > 0 {
- sessQuery = db.SetSessionPagination(sessQuery, opts)
+ sessQuery = db.SetSessionPagination(sessQuery, &opts)
}
// the sql may contain JOIN, so we must only select User related columns
diff --git a/models/user/setting.go b/models/user/setting.go
index b4af0e5ccd..c65afae76c 100644
--- a/models/user/setting.go
+++ b/models/user/setting.go
@@ -5,6 +5,7 @@ package user
import (
"context"
+ "errors"
"fmt"
"strings"
@@ -114,10 +115,10 @@ func GetUserAllSettings(ctx context.Context, uid int64) (map[string]*Setting, er
func validateUserSettingKey(key string) error {
if len(key) == 0 {
- return fmt.Errorf("setting key must be set")
+ return errors.New("setting key must be set")
}
if strings.ToLower(key) != key {
- return fmt.Errorf("setting key should be lowercase")
+ return errors.New("setting key should be lowercase")
}
return nil
}
diff --git a/models/user/setting_keys.go b/models/user/setting_options.go
index 3149aae18b..7be5039329 100644
--- a/models/user/setting_keys.go
+++ b/models/user/setting_options.go
@@ -10,6 +10,7 @@ const (
SettingsKeyDiffWhitespaceBehavior = "diff.whitespace_behaviour"
// SettingsKeyShowOutdatedComments is the setting key for whether or not to show outdated comments in PRs
SettingsKeyShowOutdatedComments = "comment_code.show_outdated"
+
// UserActivityPubPrivPem is user's private key
UserActivityPubPrivPem = "activitypub.priv_pem"
// UserActivityPubPubPem is user's public key
@@ -18,4 +19,11 @@ const (
SignupIP = "signup.ip"
// SignupUserAgent is the user agent that the user signed up with
SignupUserAgent = "signup.user_agent"
+
+ SettingsKeyCodeViewShowFileTree = "code_view.show_file_tree"
+
+ SettingsKeyEmailNotificationGiteaActions = "email_notification.gitea_actions"
+ SettingEmailNotificationGiteaActionsAll = "all"
+ SettingEmailNotificationGiteaActionsFailureOnly = "failure-only" // Default for actions email preference
+ SettingEmailNotificationGiteaActionsDisabled = "disabled"
)
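
A hedged sketch of reading the new actions e-mail preference, reusing the GetUserSetting call shape visible in setting_test.go just below; the helper and the empty-value fallback are illustrative assumptions:

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
)

// actionsEmailPreference is a hypothetical helper: an unset value falls back
// to the documented default of "failure-only".
func actionsEmailPreference(ctx context.Context, userID int64) (string, error) {
	v, err := user_model.GetUserSetting(ctx, userID, user_model.SettingsKeyEmailNotificationGiteaActions)
	if err != nil {
		return "", err
	}
	if v == "" {
		v = user_model.SettingEmailNotificationGiteaActionsFailureOnly
	}
	return v, nil
}
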
diff --git a/models/user/setting_test.go b/models/user/setting_test.go
index c607d9fd00..3c199013f3 100644
--- a/models/user/setting_test.go
+++ b/models/user/setting_test.go
@@ -30,15 +30,15 @@ func TestSettings(t *testing.T) {
settings, err := user_model.GetSettings(db.DefaultContext, 99, []string{keyName})
assert.NoError(t, err)
assert.Len(t, settings, 1)
- assert.EqualValues(t, newSetting.SettingValue, settings[keyName].SettingValue)
+ assert.Equal(t, newSetting.SettingValue, settings[keyName].SettingValue)
settingValue, err := user_model.GetUserSetting(db.DefaultContext, 99, keyName)
assert.NoError(t, err)
- assert.EqualValues(t, newSetting.SettingValue, settingValue)
+ assert.Equal(t, newSetting.SettingValue, settingValue)
settingValue, err = user_model.GetUserSetting(db.DefaultContext, 99, "no_such")
assert.NoError(t, err)
- assert.EqualValues(t, "", settingValue)
+ assert.Empty(t, settingValue)
// updated setting
updatedSetting := &user_model.Setting{UserID: 99, SettingKey: keyName, SettingValue: "Updated"}
@@ -49,7 +49,7 @@ func TestSettings(t *testing.T) {
settings, err = user_model.GetUserAllSettings(db.DefaultContext, 99)
assert.NoError(t, err)
assert.Len(t, settings, 1)
- assert.EqualValues(t, updatedSetting.SettingValue, settings[updatedSetting.SettingKey].SettingValue)
+ assert.Equal(t, updatedSetting.SettingValue, settings[updatedSetting.SettingKey].SettingValue)
// delete setting
err = user_model.DeleteUserSetting(db.DefaultContext, 99, keyName)
diff --git a/models/user/user.go b/models/user/user.go
index 3c72aa7cc4..c362cbc6d2 100644
--- a/models/user/user.go
+++ b/models/user/user.go
@@ -247,19 +247,20 @@ func (u *User) MaxCreationLimit() int {
return u.MaxRepoCreation
}
-// CanCreateRepo returns if user login can create a repository
-// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised
-func (u *User) CanCreateRepo() bool {
+// CanCreateRepoIn checks whether the doer (u) can create a repository in the owner's namespace
+// NOTE: functions calling this assume a failure is due to the repository count limit; it ONLY checks the repo number LIMIT. If new checks are added, those functions should be revised
+func (u *User) CanCreateRepoIn(owner *User) bool {
if u.IsAdmin {
return true
}
- if u.MaxRepoCreation <= -1 {
- if setting.Repository.MaxCreationLimit <= -1 {
+ const noLimit = -1
+ if owner.MaxRepoCreation == noLimit {
+ if setting.Repository.MaxCreationLimit == noLimit {
return true
}
- return u.NumRepos < setting.Repository.MaxCreationLimit
+ return owner.NumRepos < setting.Repository.MaxCreationLimit
}
- return u.NumRepos < u.MaxRepoCreation
+ return owner.NumRepos < owner.MaxRepoCreation
}
// CanCreateOrganization returns true if user can create organisation.
@@ -272,13 +273,12 @@ func (u *User) CanEditGitHook() bool {
return !setting.DisableGitHooks && (u.IsAdmin || u.AllowGitHook)
}
-// CanForkRepo returns if user login can fork a repository
-// It checks especially that the user can create repos, and potentially more
-func (u *User) CanForkRepo() bool {
+// CanForkRepoIn ONLY checks repository count limit
+func (u *User) CanForkRepoIn(owner *User) bool {
if setting.Repository.AllowForkWithoutMaximumLimit {
return true
}
- return u.CanCreateRepo()
+ return u.CanCreateRepoIn(owner)
}
// CanImportLocal returns true if user can migrate repository by local path.
@@ -828,6 +828,21 @@ func IsLastAdminUser(ctx context.Context, user *User) bool {
type CountUserFilter struct {
LastLoginSince *int64
IsAdmin optional.Option[bool]
+ IsActive optional.Option[bool]
+}
+
+// HasUsers reports whether any users exist in the database and whether there is exactly one user.
+func HasUsers(ctx context.Context) (ret struct {
+ HasAnyUser, HasOnlyOneUser bool
+}, err error,
+) {
+ res, err := db.GetEngine(ctx).Table(&User{}).Cols("id").Limit(2).Query()
+ if err != nil {
+ return ret, fmt.Errorf("error checking user existence: %w", err)
+ }
+ ret.HasAnyUser = len(res) != 0
+ ret.HasOnlyOneUser = len(res) == 1
+ return ret, nil
}
// CountUsers returns number of users.
@@ -848,6 +863,10 @@ func countUsers(ctx context.Context, opts *CountUserFilter) int64 {
if opts.IsAdmin.Has() {
cond = cond.And(builder.Eq{"is_admin": opts.IsAdmin.Value()})
}
+
+ if opts.IsActive.Has() {
+ cond = cond.And(builder.Eq{"is_active": opts.IsActive.Value()})
+ }
}
count, err := sess.Where(cond).Count(new(User))
@@ -1146,13 +1165,7 @@ func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) ([
}
for _, c := range oldCommits {
- user, ok := emailUserMap[c.Author.Email]
- if !ok {
- user = &User{
- Name: c.Author.Name,
- Email: c.Author.Email,
- }
- }
+ user := emailUserMap.GetByEmail(c.Author.Email) // FIXME: why does ValidateCommitsWithEmails use "Author" while ParseCommitsWithSignature uses "Committer"?
newCommits = append(newCommits, &UserCommit{
User: user,
Commit: c,
@@ -1161,19 +1174,29 @@ func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) ([
return newCommits, nil
}
-func GetUsersByEmails(ctx context.Context, emails []string) (map[string]*User, error) {
+type EmailUserMap struct {
+ m map[string]*User
+}
+
+func (eum *EmailUserMap) GetByEmail(email string) *User {
+ return eum.m[strings.ToLower(email)]
+}
+
+func GetUsersByEmails(ctx context.Context, emails []string) (*EmailUserMap, error) {
if len(emails) == 0 {
return nil, nil
}
needCheckEmails := make(container.Set[string])
needCheckUserNames := make(container.Set[string])
+ noReplyAddressSuffix := "@" + strings.ToLower(setting.Service.NoReplyAddress)
for _, email := range emails {
- if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
- username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
- needCheckUserNames.Add(username)
+ emailLower := strings.ToLower(email)
+ if noReplyUserNameLower, ok := strings.CutSuffix(emailLower, noReplyAddressSuffix); ok {
+ needCheckUserNames.Add(noReplyUserNameLower)
+ needCheckEmails.Add(emailLower)
} else {
- needCheckEmails.Add(strings.ToLower(email))
+ needCheckEmails.Add(emailLower)
}
}
@@ -1187,31 +1210,30 @@ func GetUsersByEmails(ctx context.Context, emails []string) (map[string]*User, e
for _, email := range emailAddresses {
userIDs.Add(email.UID)
}
- users, err := GetUsersMapByIDs(ctx, userIDs.Values())
- if err != nil {
- return nil, err
- }
-
results := make(map[string]*User, len(emails))
- for _, email := range emailAddresses {
- user := users[email.UID]
- if user != nil {
- if user.KeepEmailPrivate {
- results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
- } else {
- results[email.Email] = user
+
+ if len(userIDs) > 0 {
+ users, err := GetUsersMapByIDs(ctx, userIDs.Values())
+ if err != nil {
+ return nil, err
+ }
+
+ for _, email := range emailAddresses {
+ user := users[email.UID]
+ if user != nil {
+ results[email.LowerEmail] = user
}
}
}
- users = make(map[int64]*User, len(needCheckUserNames))
+ users := make(map[int64]*User, len(needCheckUserNames))
if err := db.GetEngine(ctx).In("lower_name", needCheckUserNames.Values()).Find(&users); err != nil {
return nil, err
}
for _, user := range users {
- results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
+ results[strings.ToLower(user.GetPlaceholderEmail())] = user
}
- return results, nil
+ return &EmailUserMap{results}, nil
}
// GetUserByEmail returns the user object by given e-mail if exists.
@@ -1232,8 +1254,8 @@ func GetUserByEmail(ctx context.Context, email string) (*User, error) {
}
// Finally, if email address is the protected email address:
- if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
- username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
+ if strings.HasSuffix(email, "@"+setting.Service.NoReplyAddress) {
+ username := strings.TrimSuffix(email, "@"+setting.Service.NoReplyAddress)
user := &User{}
has, err := db.GetEngine(ctx).Where("lower_name=?", username).Get(user)
if err != nil {
diff --git a/models/user/user_list.go b/models/user/user_list.go
index 4241905058..1b6a27dd86 100644
--- a/models/user/user_list.go
+++ b/models/user/user_list.go
@@ -17,10 +17,7 @@ func GetUsersMapByIDs(ctx context.Context, userIDs []int64) (map[int64]*User, er
left := len(userIDs)
for left > 0 {
- limit := db.DefaultMaxInSize
- if left < limit {
- limit = left
- }
+ limit := min(left, db.DefaultMaxInSize)
err := db.GetEngine(ctx).
In("id", userIDs[:limit]).
Find(&userMaps)
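
The hand-rolled clamp gives way to Go 1.21's built-in min; the same chunking idiom in isolation:

package main

import "fmt"

func main() {
	ids := []int64{1, 2, 3, 4, 5, 6, 7}
	const batch = 3
	for len(ids) > 0 {
		n := min(len(ids), batch) // built-in min, Go 1.21+
		fmt.Println("querying batch:", ids[:n])
		ids = ids[n:]
	}
}
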
diff --git a/models/user/user_system.go b/models/user/user_system.go
index 6fbfd9e69e..e07274d291 100644
--- a/models/user/user_system.go
+++ b/models/user/user_system.go
@@ -10,8 +10,8 @@ import (
)
const (
- GhostUserID = -1
- GhostUserName = "Ghost"
+ GhostUserID int64 = -1
+ GhostUserName = "Ghost"
)
// NewGhostUser creates and returns a fake user for someone who has deleted their account.
@@ -36,9 +36,9 @@ func (u *User) IsGhost() bool {
}
const (
- ActionsUserID = -2
- ActionsUserName = "gitea-actions"
- ActionsUserEmail = "teabot@gitea.io"
+ ActionsUserID int64 = -2
+ ActionsUserName = "gitea-actions"
+ ActionsUserEmail = "teabot@gitea.io"
)
func IsGiteaActionsUserName(name string) bool {
diff --git a/models/user/user_test.go b/models/user/user_test.go
index 1132c02f28..7944fc4b73 100644
--- a/models/user/user_test.go
+++ b/models/user/user_test.go
@@ -19,9 +19,11 @@ import (
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestIsUsableUsername(t *testing.T) {
@@ -47,14 +49,48 @@ func TestOAuth2Application_LoadUser(t *testing.T) {
assert.NotNil(t, user)
}
-func TestGetUserEmailsByNames(t *testing.T) {
+func TestUserEmails(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
+ t.Run("GetUserEmailsByNames", func(t *testing.T) {
+ // ignore none active user email
+ assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"}))
+ assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"}))
+ assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "org7"}))
+ })
+ t.Run("GetUsersByEmails", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Service.NoReplyAddress, "NoReply.gitea.internal")()
+ testGetUserByEmail := func(t *testing.T, email string, uid int64) {
+ m, err := user_model.GetUsersByEmails(db.DefaultContext, []string{email})
+ require.NoError(t, err)
+ user := m.GetByEmail(email)
+ if uid == 0 {
+ require.Nil(t, user)
+ return
+ }
+ require.NotNil(t, user)
+ assert.Equal(t, uid, user.ID)
+ }
+ cases := []struct {
+ Email string
+ UID int64
+ }{
+ {"UseR1@example.com", 1},
+ {"user1-2@example.COM", 1},
+ {"USER2@" + setting.Service.NoReplyAddress, 2},
+ {"user4@example.com", 4},
+ {"no-such", 0},
+ }
+ for _, c := range cases {
+ t.Run(c.Email, func(t *testing.T) {
+ testGetUserByEmail(t, c.Email, c.UID)
+ })
+ }
- // ignore none active user email
- assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"}))
- assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"}))
-
- assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "org7"}))
+ t.Run("NoReplyConflict", func(t *testing.T) {
+ setting.Service.NoReplyAddress = "example.com"
+ testGetUserByEmail(t, "user1-2@example.COM", 1)
+ })
+ })
}
func TestCanCreateOrganization(t *testing.T) {
@@ -77,73 +113,73 @@ func TestCanCreateOrganization(t *testing.T) {
func TestSearchUsers(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- testSuccess := func(opts *user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) {
+ testSuccess := func(opts user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) {
users, _, err := user_model.SearchUsers(db.DefaultContext, opts)
assert.NoError(t, err)
cassText := fmt.Sprintf("ids: %v, opts: %v", expectedUserOrOrgIDs, opts)
if assert.Len(t, users, len(expectedUserOrOrgIDs), "case: %s", cassText) {
for i, expectedID := range expectedUserOrOrgIDs {
- assert.EqualValues(t, expectedID, users[i].ID, "case: %s", cassText)
+ assert.Equal(t, expectedID, users[i].ID, "case: %s", cassText)
}
}
}
// test orgs
- testOrgSuccess := func(opts *user_model.SearchUserOptions, expectedOrgIDs []int64) {
+ testOrgSuccess := func(opts user_model.SearchUserOptions, expectedOrgIDs []int64) {
opts.Type = user_model.UserTypeOrganization
testSuccess(opts, expectedOrgIDs)
}
- testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}},
[]int64{3, 6})
- testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}},
[]int64{7, 17})
- testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}},
[]int64{19, 25})
- testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
[]int64{26, 41})
- testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
[]int64{42})
- testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}},
+ testOrgSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}},
[]int64{})
// test users
- testUserSuccess := func(opts *user_model.SearchUserOptions, expectedUserIDs []int64) {
+ testUserSuccess := func(opts user_model.SearchUserOptions, expectedUserIDs []int64) {
opts.Type = user_model.UserTypeIndividual
testSuccess(opts, expectedUserIDs)
}
- testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}},
+ testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}},
[]int64{1, 2, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
- testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)},
+ testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)},
[]int64{9})
- testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 2, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
- testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 10, 11, 12, 13, 14, 15, 16, 18})
// order by name asc default
- testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 10, 11, 12, 13, 14, 15, 16, 18})
- testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)},
[]int64{1})
- testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)},
[]int64{29})
- testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)},
[]int64{37})
- testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)},
+ testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)},
[]int64{24})
}
@@ -173,9 +209,9 @@ func TestHashPasswordDeterministic(t *testing.T) {
b := make([]byte, 16)
u := &user_model.User{}
algos := hash.RecommendedHashAlgorithms
- for j := 0; j < len(algos); j++ {
+ for j := range algos {
u.PasswdHashAlgo = algos[j]
- for i := 0; i < 50; i++ {
+ for range 50 {
// generate a random password
rand.Read(b)
pass := string(b)
@@ -502,18 +538,15 @@ func TestIsUserVisibleToViewer(t *testing.T) {
}
func Test_ValidateUser(t *testing.T) {
- oldSetting := setting.Service.AllowedUserVisibilityModesSlice
- defer func() {
- setting.Service.AllowedUserVisibilityModesSlice = oldSetting
- }()
- setting.Service.AllowedUserVisibilityModesSlice = []bool{true, false, true}
+ defer test.MockVariableValue(&setting.Service.AllowedUserVisibilityModesSlice, []bool{true, false, true})()
+
kases := map[*user_model.User]bool{
{ID: 1, Visibility: structs.VisibleTypePublic}: true,
{ID: 2, Visibility: structs.VisibleTypeLimited}: false,
{ID: 2, Visibility: structs.VisibleTypePrivate}: true,
}
for kase, expected := range kases {
- assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase)
+ assert.Equal(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase)
}
}
@@ -537,7 +570,7 @@ func Test_NormalizeUserFromEmail(t *testing.T) {
for _, testCase := range testCases {
normalizedName, err := user_model.NormalizeUserName(testCase.Input)
assert.NoError(t, err)
- assert.EqualValues(t, testCase.Expected, normalizedName)
+ assert.Equal(t, testCase.Expected, normalizedName)
if testCase.IsNormalizedValid {
assert.NoError(t, user_model.IsUsableUsername(normalizedName))
} else {
@@ -564,7 +597,7 @@ func TestEmailTo(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.result, func(t *testing.T) {
testUser := &user_model.User{FullName: testCase.fullName, Email: testCase.mail}
- assert.EqualValues(t, testCase.result, testUser.EmailTo())
+ assert.Equal(t, testCase.result, testUser.EmailTo())
})
}
}
@@ -575,12 +608,7 @@ func TestDisabledUserFeatures(t *testing.T) {
testValues := container.SetOf(setting.UserFeatureDeletion,
setting.UserFeatureManageSSHKeys,
setting.UserFeatureManageGPGKeys)
-
- oldSetting := setting.Admin.ExternalUserDisableFeatures
- defer func() {
- setting.Admin.ExternalUserDisableFeatures = oldSetting
- }()
- setting.Admin.ExternalUserDisableFeatures = testValues
+ defer test.MockVariableValue(&setting.Admin.ExternalUserDisableFeatures, testValues)()
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
@@ -616,3 +644,37 @@ func TestGetInactiveUsers(t *testing.T) {
assert.NoError(t, err)
assert.Empty(t, users)
}
+
+func TestCanCreateRepo(t *testing.T) {
+ defer test.MockVariableValue(&setting.Repository.MaxCreationLimit)()
+ const noLimit = -1
+ doerNormal := &user_model.User{}
+ doerAdmin := &user_model.User{IsAdmin: true}
+ t.Run("NoGlobalLimit", func(t *testing.T) {
+ setting.Repository.MaxCreationLimit = noLimit
+
+ assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+ assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+ assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+ })
+
+ t.Run("GlobalLimit50", func(t *testing.T) {
+ setting.Repository.MaxCreationLimit = 50
+
+ assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+ assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: noLimit})) // limited by global limit
+ assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+ assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+ assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: 100}))
+
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: noLimit}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+ assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: 100}))
+ })
+}
diff --git a/models/webhook/hooktask.go b/models/webhook/hooktask.go
index ff3fdbadb2..96ec11e43f 100644
--- a/models/webhook/hooktask.go
+++ b/models/webhook/hooktask.go
@@ -198,7 +198,8 @@ func MarkTaskDelivered(ctx context.Context, task *HookTask) (bool, error) {
func CleanupHookTaskTable(ctx context.Context, cleanupType HookTaskCleanupType, olderThan time.Duration, numberToKeep int) error {
log.Trace("Doing: CleanupHookTaskTable")
- if cleanupType == OlderThan {
+ switch cleanupType {
+ case OlderThan:
deleteOlderThan := time.Now().Add(-olderThan).UnixNano()
deletes, err := db.GetEngine(ctx).
Where("is_delivered = ? and delivered < ?", true, deleteOlderThan).
@@ -207,7 +208,7 @@ func CleanupHookTaskTable(ctx context.Context, cleanupType HookTaskCleanupType,
return err
}
log.Trace("Deleted %d rows from hook_task", deletes)
- } else if cleanupType == PerWebhook {
+ case PerWebhook:
hookIDs := make([]int64, 0, 10)
err := db.GetEngine(ctx).
Table("webhook").
diff --git a/models/webhook/webhook.go b/models/webhook/webhook.go
index 97ad373027..7d4b2e2237 100644
--- a/models/webhook/webhook.go
+++ b/models/webhook/webhook.go
@@ -240,7 +240,7 @@ func CreateWebhooks(ctx context.Context, ws []*Webhook) error {
if len(ws) == 0 {
return nil
}
- for i := 0; i < len(ws); i++ {
+ for i := range ws {
ws[i].Type = strings.TrimSpace(ws[i].Type)
}
return db.Insert(ctx, ws)
@@ -319,21 +319,16 @@ func UpdateWebhookLastStatus(ctx context.Context, w *Webhook) error {
// DeleteWebhookByID uses the argument as the query condition;
// the ID must be specified and unnecessary fields must not be assigned.
func DeleteWebhookByID(ctx context.Context, id int64) (err error) {
- ctx, committer, err := db.TxContext(ctx)
- if err != nil {
- return err
- }
- defer committer.Close()
-
- if count, err := db.DeleteByID[Webhook](ctx, id); err != nil {
- return err
- } else if count == 0 {
- return ErrWebhookNotExist{ID: id}
- } else if _, err = db.DeleteByBean(ctx, &HookTask{HookID: id}); err != nil {
- return err
- }
-
- return committer.Commit()
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if count, err := db.DeleteByID[Webhook](ctx, id); err != nil {
+ return err
+ } else if count == 0 {
+ return ErrWebhookNotExist{ID: id}
+ } else if _, err = db.DeleteByBean(ctx, &HookTask{HookID: id}); err != nil {
+ return err
+ }
+ return nil
+ })
}
// DeleteWebhookByRepoID deletes webhook of repository by given ID.
diff --git a/models/webhook/webhook_test.go b/models/webhook/webhook_test.go
index 6ff77a380d..edad8fc996 100644
--- a/models/webhook/webhook_test.go
+++ b/models/webhook/webhook_test.go
@@ -67,13 +67,13 @@ func TestWebhook_UpdateEvent(t *testing.T) {
}
func TestWebhook_EventsArray(t *testing.T) {
- assert.EqualValues(t, []string{
+ assert.Equal(t, []string{
"create", "delete", "fork", "push",
"issues", "issue_assign", "issue_label", "issue_milestone", "issue_comment",
"pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone",
"pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected",
"pull_request_review_comment", "pull_request_sync", "pull_request_review_request", "wiki", "repository", "release",
- "package", "status", "workflow_job",
+ "package", "status", "workflow_run", "workflow_job",
},
(&Webhook{
HookEvent: &webhook_module.HookEvent{SendEverything: true},
@@ -90,7 +90,7 @@ func TestWebhook_EventsArray(t *testing.T) {
func TestCreateWebhook(t *testing.T) {
hook := &Webhook{
RepoID: 3,
- URL: "www.example.com/unit_test",
+ URL: "https://www.example.com/unit_test",
ContentType: ContentTypeJSON,
Events: `{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}`,
}