comparison: september.py @ 1:6bbebb01f614
Bunch of fixes:
* Don't change the current working directory.
* Clone the repo as late as possible.
* Add `--scan-only` and `--status` options.
* Fix `first_tag`.
* Correctly pull and update an existing clone.
* Use the system's temp directory if no temp directory is specified.
author   | Ludovic Chabant <ludovic@chabant.com>
date     | Sat, 28 Mar 2015 15:43:44 -0700
parents  | ee98303e24b8
children | bdfc8a4a335d
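To make the last bullet in the description concrete: when the configuration has no `tmp_dir` setting, the script now derives a per-repository directory under the system temp directory from an MD5 hash of the repository path (new lines 177-182 below), so the clone and the `september.json` cache land in a stable location between runs. A minimal standalone sketch of that fallback, using a hypothetical repository path for illustration:

```python
import hashlib
import os.path
import tempfile


def default_tmp_dir(repo_dir):
    # Fallback used when no `tmp_dir` is configured: hash the repository
    # path so each repository gets its own stable folder under the system
    # temp directory.
    tmp_name = 'september_%s' % hashlib.md5(
        repo_dir.encode('utf8')).hexdigest()
    return os.path.join(tempfile.gettempdir(), tmp_name)


# Hypothetical path, for illustration only; the same input always maps to
# the same directory, e.g. /tmp/september_<md5-hex> on Linux.
print(default_tmp_dir('/home/user/code/myproject'))
```

The "don't change the current working directory" fix shows up in the hunks below as `git -C <repo_path>` and `hg -R <repo_path>` arguments, which lets the script drop the `os.chdir()` calls around cloning and updating.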
0:ee98303e24b8 | 1:6bbebb01f614 |
---|---|
2 import re | 2 import re |
3 import sys | 3 import sys |
4 import json | 4 import json |
5 import os.path | 5 import os.path |
6 import logging | 6 import logging |
7 import hashlib | |
7 import argparse | 8 import argparse |
9 import tempfile | |
8 import subprocess | 10 import subprocess |
9 import configparser | 11 import configparser |
10 from urllib.parse import urlparse | 12 from urllib.parse import urlparse |
11 | 13 |
12 | 14 |
15 | 17 |
16 class IRepo(object): | 18 class IRepo(object): |
17 def clone(self, repo_url, repo_path): | 19 def clone(self, repo_url, repo_path): |
18 raise NotImplementedError() | 20 raise NotImplementedError() |
19 | 21 |
20 def pull(self, repo_path): | 22 def pull(self, repo_path, remote): |
21 raise NotImplementedError() | 23 raise NotImplementedError() |
22 | 24 |
23 def getTags(self, repo_path): | 25 def getTags(self, repo_path): |
24 raise NotImplementedError() | 26 raise NotImplementedError() |
25 | 27 |
29 | 31 |
30 class GitRepo(object): | 32 class GitRepo(object): |
31 def clone(self, repo_url, repo_path): | 33 def clone(self, repo_url, repo_path): |
32 subprocess.check_call(['git', 'clone', repo_url, repo_path]) | 34 subprocess.check_call(['git', 'clone', repo_url, repo_path]) |
33 | 35 |
34 def pull(self, repo_path): | 36 def pull(self, repo_path, remote): |
35 subprocess.check_call(['git', 'pull', 'origin', 'master']) | 37 subprocess.check_call(['git', '-C', repo_path, |
38 'pull', remote, 'master']) | |
36 | 39 |
37 def getTags(self, repo_path): | 40 def getTags(self, repo_path): |
38 output = subprocess.check_output(['git', 'show-ref', '--tags']) | 41 output = subprocess.check_output(['git', '-C', repo_path, |
42 'show-ref', '--tags']) | |
39 pat = re.compile(r'^(?P<id>[0-9a-f]+) (?P<tag>.+)$') | 43 pat = re.compile(r'^(?P<id>[0-9a-f]+) (?P<tag>.+)$') |
40 for line in output.split('\n'): | 44 for line in output.split('\n'): |
41 m = pat.match(line) | 45 m = pat.match(line) |
42 if m: | 46 if m: |
43 yield (m.group('tag'), m.group('id')) | 47 yield (m.group('tag'), m.group('id')) |
44 | 48 |
45 def update(self, repo_path, rev_id): | 49 def update(self, repo_path, rev_id): |
46 rev_id = rev_id or 'master' | 50 rev_id = rev_id or 'master' |
47 subprocess.check_call(['git', 'checkout', rev_id]) | 51 subprocess.check_call(['git', '-C', repo_path, 'checkout', rev_id]) |
48 | 52 |
49 | 53 |
50 class MercurialRepo(object): | 54 class MercurialRepo(object): |
51 def clone(self, repo_url, repo_path): | 55 def clone(self, repo_url, repo_path): |
52 subprocess.check_call(['hg', 'clone', repo_url, repo_path]) | 56 subprocess.check_call(['hg', 'clone', repo_url, repo_path]) |
53 | 57 |
54 def pull(self, repo_path): | 58 def pull(self, repo_path, remote): |
55 subprocess.check_call(['hg', 'pull'], | 59 subprocess.check_call(['hg', '-R', repo_path, 'pull', remote], |
56 stderr=subprocess.STDOUT) | 60 stderr=subprocess.STDOUT) |
57 | 61 |
58 def getTags(self, repo_path): | 62 def getTags(self, repo_path): |
59 output = subprocess.check_output( | 63 output = subprocess.check_output( |
60 'hg log -r "tag()" --template "{tags} {node}\\n"', | 64 ('hg -R "' + repo_path + |
65 '" log -r "tag()" --template "{tags} {node}\\n"'), | |
61 stderr=subprocess.STDOUT, | 66 stderr=subprocess.STDOUT, |
62 universal_newlines=True, | 67 universal_newlines=True, |
63 shell=True) | 68 shell=True) |
64 pat = re.compile(r'^(?P<tag>.+) (?P<id>[0-9a-f]+)$') | 69 pat = re.compile(r'^(?P<tag>.+) (?P<id>[0-9a-f]+)$') |
65 for line in output.split('\n'): | 70 for line in output.split('\n'): |
67 if m: | 72 if m: |
68 yield (m.group('tag'), m.group('id')) | 73 yield (m.group('tag'), m.group('id')) |
69 | 74 |
70 def update(self, repo_path, rev_id): | 75 def update(self, repo_path, rev_id): |
71 rev_id = rev_id or 'default' | 76 rev_id = rev_id or 'default' |
72 subprocess.check_call(['hg', 'update', rev_id], | 77 subprocess.check_call(['hg', '-R', repo_path, 'update', rev_id], |
73 stderr=subprocess.STDOUT) | 78 stderr=subprocess.STDOUT) |
74 | 79 |
75 | 80 |
76 repo_class = { | 81 repo_class = { |
77 'git': GitRepo, | 82 'git': GitRepo, |
103 prog='september', | 108 prog='september', |
104 description=("An utility that goes back in time and does " | 109 description=("An utility that goes back in time and does " |
105 "something in the background.")) | 110 "something in the background.")) |
106 parser.add_argument( | 111 parser.add_argument( |
107 'repo', | 112 'repo', |
113 nargs='?', | |
108 help="The repository to observe and process") | 114 help="The repository to observe and process") |
109 parser.add_argument( | 115 parser.add_argument( |
110 '-t', '--tmp-dir', | 116 '-t', '--tmp-dir', |
111 help="The temporary directory in which to clone the repository.") | 117 help="The temporary directory in which to clone the repository.") |
112 parser.add_argument( | 118 parser.add_argument( |
118 '--config', | 124 '--config', |
119 help="The configuration file to use.") | 125 help="The configuration file to use.") |
120 parser.add_argument( | 126 parser.add_argument( |
121 '--command', | 127 '--command', |
122 help="The command to run on each tag.") | 128 help="The command to run on each tag.") |
123 | 129 parser.add_argument( |
124 # Parse arguments, guess repo type. | 130 '--scan-only', |
131 action='store_true', | |
132 help=("Only scan the repository history. Don't update or run the " | |
133 "command")) | |
134 parser.add_argument( | |
135 '--status', | |
136 action='store_true', | |
137 help="See September's status for the given repository.") | |
138 | |
139 # Parse arguments. | |
125 res = parser.parse_args() | 140 res = parser.parse_args() |
141 repo_dir = res.repo or os.getcwd() | |
142 | |
143 # Guess the repo type. | |
126 repo_type = res.scm | 144 repo_type = res.scm |
127 if not repo_type or repo_type == 'guess': | 145 if not repo_type or repo_type == 'guess': |
128 repo_type = guess_repo_type(res.repo) | 146 repo_type = guess_repo_type(repo_dir) |
129 if not repo_type: | 147 if not repo_type: |
130 logger.error("Can't guess the repository type. Please use the " | 148 logger.error("Can't guess the repository type. Please use the " |
131 "--scm option to specify it.") | 149 "--scm option to specify it.") |
132 sys.exit(1) | 150 sys.exit(1) |
133 if repo_type not in repo_class: | 151 if repo_type not in repo_class: |
134 logger.error("Unknown repository type: %s" % repo_type) | 152 logger.error("Unknown repository type: %s" % repo_type) |
135 sys.exit(1) | 153 sys.exit(1) |
136 | 154 |
137 # Create the repo handler. | 155 # Find the configuration file. |
138 repo = repo_class[repo_type]() | 156 config_file = res.config or os.path.join(repo_dir, '.september.cfg') |
139 | |
140 # Clone or update/checkout the repository in the temp directory. | |
141 clone_dir = os.path.join(res.tmp_dir, 'clone') | |
142 if not os.path.exists(clone_dir): | |
143 logger.info("Cloning '%s' into: %s" % (res.repo, clone_dir)) | |
144 repo.clone(res.repo, clone_dir) | |
145 else: | |
146 os.chdir(clone_dir) | |
147 logger.info("Pulling changes from '%s'." % res.repo) | |
148 repo.update(res.repo, None) | |
149 | |
150 os.chdir(clone_dir) | |
151 | |
152 # Find the configuration file in the repository clone. | |
153 config_file = res.config or os.path.join(clone_dir, '.september.yml') | |
154 config = configparser.ConfigParser(interpolation=None) | 157 config = configparser.ConfigParser(interpolation=None) |
155 if os.path.exists(config_file): | 158 if os.path.exists(config_file): |
156 logger.info("Loading configuration file: %s" % config_file) | 159 logger.info("Loading configuration file: %s" % config_file) |
157 config.read(config_file) | 160 config.read(config_file) |
158 | 161 |
162 # Validate the configuration. | |
159 if not config.has_section('september'): | 163 if not config.has_section('september'): |
160 config.add_section('september') | 164 config.add_section('september') |
161 config_sec = config['september'] | 165 config_sec = config['september'] |
162 if res.command: | 166 if res.command: |
163 config_sec['command'] = res.command | 167 config_sec['command'] = res.command |
168 if res.tmp_dir: | |
169 config_sec['tmp_dir'] = res.tmp_dir | |
164 | 170 |
165 if not config.has_option('september', 'command'): | 171 if not config.has_option('september', 'command'): |
166 logger.error("There is no 'command' configuration setting under the " | 172 logger.error("There is no 'command' configuration setting under the " |
167 "'september' section, and no command was passed as an " | 173 "'september' section, and no command was passed as an " |
168 "option.") | 174 "option.") |
169 sys.exit(1) | 175 sys.exit(1) |
170 | 176 |
177 # Get the temp dir. | |
178 tmp_dir = config_sec.get('tmp_dir', None) | |
179 if not tmp_dir: | |
180 tmp_name = 'september_%s' % hashlib.md5( | |
181 repo_dir.encode('utf8')).hexdigest() | |
182 tmp_dir = os.path.join(tempfile.gettempdir(), tmp_name) | |
183 | |
171 # Find the cache file in the temp directory. | 184 # Find the cache file in the temp directory. |
172 cache_file = os.path.join(res.tmp_dir, 'september.json') | 185 cache_file = os.path.join(tmp_dir, 'september.json') |
173 if os.path.exists(cache_file): | 186 if os.path.exists(cache_file): |
174 with open(cache_file, 'r') as fp: | 187 with open(cache_file, 'r') as fp: |
175 cache = json.load(fp) | 188 cache = json.load(fp) |
176 else: | 189 else: |
177 cache = {'tags': {}} | 190 cache = {'tags': {}} |
191 | |
192 # See if we just need to show the status: | |
193 if res.status: | |
194 logger.info("Status for '%s':" % repo_dir) | |
195 for t, v in cache['tags'].items(): | |
196 logger.info("- %s" % t) | |
197 logger.info(" commit ID : %s" % v['id']) | |
198 logger.info(" processed? : %s" % v['processed']) | |
199 return | |
200 | |
201 # Create the repo handler. | |
202 repo = repo_class[repo_type]() | |
178 | 203 |
179 # Update the cache: get any new/moved tags. | 204 # Update the cache: get any new/moved tags. |
180 first_tag = config_sec.get('first_tag', None) | 205 first_tag = config_sec.get('first_tag', None) |
181 tag_pattern = config_sec.get('tag_pattern', None) | 206 tag_pattern = config_sec.get('tag_pattern', None) |
182 tag_re = None | 207 tag_re = None |
183 if tag_pattern: | 208 if tag_pattern: |
184 tag_re = re.compile(tag_pattern) | 209 tag_re = re.compile(tag_pattern) |
185 | 210 |
211 reached_first_tag = not bool(first_tag) | |
186 previous_tags = cache['tags'] | 212 previous_tags = cache['tags'] |
187 tags = repo.getTags(clone_dir) | 213 tags = repo.getTags(repo_dir) |
188 for t, i in tags: | 214 for t, i in tags: |
215 if not reached_first_tag and first_tag == t: | |
216 reached_first_tag = True | |
217 | |
218 if not reached_first_tag: | |
219 if t in previous_tags: | |
220 logger.info("Removing tag '%s'." % t) | |
221 del previous_tags[t] | |
222 continue | |
223 | |
189 if not tag_re or tag_re.search(t): | 224 if not tag_re or tag_re.search(t): |
190 if t not in previous_tags: | 225 if t not in previous_tags: |
191 logger.info("Adding tag '%s'." % t) | 226 logger.info("Adding tag '%s'." % t) |
192 previous_tags[t] = {'id': i, 'processed': False} | 227 previous_tags[t] = {'id': i, 'processed': False} |
193 elif previous_tags[t]['id'] != i: | 228 elif previous_tags[t]['id'] != i: |
194 logger.info("Moving tag '%s'." % t) | 229 logger.info("Moving tag '%s'." % t) |
195 previous_tags[t] = {'id': i, 'processed': False} | 230 previous_tags[t] = {'id': i, 'processed': False} |
196 | 231 |
197 if first_tag and first_tag == t: | |
198 break | |
199 | |
200 logger.info("Updating cache file '%s'." % cache_file) | 232 logger.info("Updating cache file '%s'." % cache_file) |
201 with open(cache_file, 'w') as fp: | 233 with open(cache_file, 'w') as fp: |
202 json.dump(cache, fp) | 234 json.dump(cache, fp) |
235 | |
236 if res.scan_only: | |
237 return | |
238 | |
239 # Clone or update/checkout the repository in the temp directory. | |
240 clone_dir = os.path.join(tmp_dir, 'clone') | |
241 if not os.path.exists(clone_dir): | |
242 logger.info("Cloning '%s' into: %s" % (repo_dir, clone_dir)) | |
243 repo.clone(repo_dir, clone_dir) | |
244 else: | |
245 logger.info("Pulling changes from '%s'." % repo_dir) | |
246 repo.pull(clone_dir, repo_dir) | |
247 repo.update(clone_dir, None) | |
203 | 248 |
204 # Process tags! | 249 # Process tags! |
205 use_shell = config_sec.get('use_shell') in ['1', 'yes', 'true'] | 250 use_shell = config_sec.get('use_shell') in ['1', 'yes', 'true'] |
206 for tn, ti in cache['tags'].items(): | 251 for tn, ti in cache['tags'].items(): |
207 if ti['processed']: | 252 if ti['processed']: |