0
|
import argparse
import configparser
import hashlib
import json
import logging
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from urllib.parse import urlparse
|
|
13
|
|
14
|
|
# Module-level logger; handlers/level are configured in the __main__ guard.
logger = logging.getLogger(__name__)
|
|
16
|
|
17
|
|
class IRepo(object):
    """Abstract interface for a source-control backend.

    Concrete backends (Git, Mercurial) implement the same four operations;
    every method here only raises `NotImplementedError`.
    """

    def clone(self, repo_url, repo_path):
        """Clone the repository at `repo_url` into directory `repo_path`."""
        raise NotImplementedError()

    def pull(self, repo_path, remote):
        """Pull new changes from `remote` into the clone at `repo_path`."""
        raise NotImplementedError()

    def getTags(self, repo_path):
        """Yield `(tag_name, revision_id)` pairs for the repository."""
        raise NotImplementedError()

    def update(self, repo_path, rev_id):
        """Update the working copy at `repo_path` to revision `rev_id`."""
        raise NotImplementedError()
|
|
30
|
|
31
|
|
class GitRepo(object):
    """Git implementation of the repository interface."""

    def clone(self, repo_url, repo_path):
        """Clone `repo_url` into `repo_path`."""
        subprocess.check_call(['git', 'clone', repo_url, repo_path])

    def pull(self, repo_path, remote):
        """Pull the `master` branch from `remote` into `repo_path`."""
        subprocess.check_call(['git', '-C', repo_path,
                               'pull', remote, 'master'])

    def getTags(self, repo_path):
        """Yield `(tag_ref, commit_id)` pairs for every tag in the repo.

        Note: tag names are the full refs as printed by
        `git show-ref --tags` (i.e. they keep the `refs/tags/` prefix).
        """
        # BUG FIX: check_output() returns bytes by default on Python 3,
        # but the code below splits on a str newline; decode the output
        # via universal_newlines so the regex matching works.
        output = subprocess.check_output(['git', '-C', repo_path,
                                          'show-ref', '--tags'],
                                         universal_newlines=True)
        pat = re.compile(r'^(?P<id>[0-9a-f]+) (?P<tag>.+)$')
        for line in output.split('\n'):
            m = pat.match(line)
            if m:
                yield (m.group('tag'), m.group('id'))

    def update(self, repo_path, rev_id):
        """Check out `rev_id` in the clone (defaults to `master` if falsy)."""
        rev_id = rev_id or 'master'
        subprocess.check_call(['git', '-C', repo_path, 'checkout', rev_id])
|
0
|
52
|
|
53
|
|
class MercurialRepo(object):
    """Mercurial implementation of the repository interface."""

    def clone(self, repo_url, repo_path):
        """Clone `repo_url` into `repo_path`."""
        subprocess.check_call(['hg', 'clone', repo_url, repo_path])

    def pull(self, repo_path, remote):
        """Pull new changesets from `remote` into the clone at `repo_path`."""
        subprocess.check_call(['hg', '-R', repo_path, 'pull', remote],
                              stderr=subprocess.STDOUT)

    def getTags(self, repo_path):
        """Yield `(tag_name, changeset_id)` pairs for every tagged rev."""
        # SECURITY/ROBUSTNESS FIX: the original interpolated repo_path into
        # a shell command string (shell=True), which breaks on paths with
        # quotes/spaces and allows shell injection. Pass an argument list
        # instead. The '\\n' escape is expanded to a newline by hg's
        # template engine, exactly as before.
        output = subprocess.check_output(
            ['hg', '-R', repo_path, 'log', '-r', 'tag()',
             '--template', '{tags} {node}\\n'],
            stderr=subprocess.STDOUT,
            universal_newlines=True)
        pat = re.compile(r'^(?P<tag>.+) (?P<id>[0-9a-f]+)$')
        for line in output.split('\n'):
            m = pat.match(line)
            if m:
                yield (m.group('tag'), m.group('id'))

    def update(self, repo_path, rev_id):
        """Update the working copy to `rev_id` (defaults to 'default')."""
        rev_id = rev_id or 'default'
        subprocess.check_call(['hg', '-R', repo_path, 'update', rev_id],
                              stderr=subprocess.STDOUT)
|
|
79
|
|
80
|
|
# Registry mapping SCM type names (as produced by guess_repo_type) to the
# backend class implementing that SCM.
repo_class = dict(git=GitRepo, hg=MercurialRepo)
|
|
84
|
|
85
|
|
def guess_repo_type(repo):
    """Guess whether `repo` is handled by Git or Mercurial.

    `repo` may be an ``ssh://`` URL, an ``https://`` URL, or a local
    directory path. Returns ``'git'``, ``'hg'``, or ``None`` when the
    type cannot be determined.
    """
    # Parse repo as an URL: scheme://netloc/path;parameters?query#fragment
    parsed = urlparse(repo)
    if parsed.scheme == 'ssh':
        # SSH URLs conventionally encode the SCM in the user name.
        for prefix, scm in (('git@', 'git'), ('hg@', 'hg')):
            if parsed.netloc.startswith(prefix):
                return scm
    elif parsed.scheme == 'https':
        if parsed.path.endswith('.git'):
            return 'git'
    elif not parsed.scheme and not parsed.netloc and os.path.isdir(parsed.path):
        # A bare local path: look for the SCM's metadata directory.
        for marker, scm in (('.git', 'git'), ('.hg', 'hg')):
            if os.path.isdir(os.path.join(parsed.path, marker)):
                return scm
    return None
|
|
103
|
|
104
|
|
def main():
    """CLI entry point.

    Scans a repository for tags, remembers them in a JSON cache stored in a
    per-repository temp directory, clones the repository into that temp
    directory, and runs a configurable command once for each new or moved
    tag. Exits with status 1 on configuration errors.
    """
    # Setup the argument parser.
    parser = argparse.ArgumentParser(
        prog='september',
        description=("An utility that goes back in time and does "
                     "something in the background."))
    parser.add_argument(
        'repo',
        nargs='?',
        help="The repository to observe and process")
    parser.add_argument(
        '-t', '--tmp-dir',
        help="The temporary directory in which to clone the repository.")
    parser.add_argument(
        '--scm',
        default='guess',
        choices=['guess', 'git', 'mercurial'],
        help="The type of source control system handling the repository.")
    parser.add_argument(
        '--config',
        help="The configuration file to use.")
    parser.add_argument(
        '--command',
        help="The command to run on each tag.")
    parser.add_argument(
        '--scan-only',
        action='store_true',
        help=("Only scan the repository history. Don't update or run the "
              "command"))
    parser.add_argument(
        '--status',
        action='store_true',
        help="See September's status for the given repository.")

    # Parse arguments.
    res = parser.parse_args()
    repo_dir = res.repo or os.getcwd()

    # Guess the repo type.
    repo_type = res.scm
    if not repo_type or repo_type == 'guess':
        repo_type = guess_repo_type(repo_dir)
        if not repo_type:
            logger.error("Can't guess the repository type. Please use the "
                         "--scm option to specify it.")
            sys.exit(1)
    # BUG FIX: the CLI accepts '--scm mercurial' but the backend registry
    # is keyed by 'hg', so that choice always failed. Map it explicitly.
    if repo_type == 'mercurial':
        repo_type = 'hg'
    if repo_type not in repo_class:
        logger.error("Unknown repository type: %s" % repo_type)
        sys.exit(1)

    # Find the configuration file.
    config_file = res.config or os.path.join(repo_dir, '.september.cfg')
    config = configparser.ConfigParser(interpolation=None)
    if os.path.exists(config_file):
        logger.info("Loading configuration file: %s" % config_file)
        config.read(config_file)

    # Validate the configuration; command-line options override the file.
    if not config.has_section('september'):
        config.add_section('september')
    config_sec = config['september']
    if res.command:
        config_sec['command'] = res.command
    if res.tmp_dir:
        config_sec['tmp_dir'] = res.tmp_dir

    if not config.has_option('september', 'command'):
        logger.error("There is no 'command' configuration setting under the "
                     "'september' section, and no command was passed as an "
                     "option.")
        sys.exit(1)

    # Get the temp dir (one per repository, keyed on the repo path's hash).
    tmp_dir = config_sec.get('tmp_dir', None)
    if not tmp_dir:
        tmp_name = 'september_%s' % hashlib.md5(
            repo_dir.encode('utf8')).hexdigest()
        tmp_dir = os.path.join(tempfile.gettempdir(), tmp_name)
    # BUG FIX: the temp directory was never created, so writing the cache
    # file below failed on a fresh run.
    os.makedirs(tmp_dir, exist_ok=True)

    # Find the cache file in the temp directory.
    cache_file = os.path.join(tmp_dir, 'september.json')
    if os.path.exists(cache_file):
        with open(cache_file, 'r') as fp:
            cache = json.load(fp)
    else:
        cache = {'tags': {}}

    # See if we just need to show the status:
    if res.status:
        logger.info("Status for '%s':" % repo_dir)
        for t, v in cache['tags'].items():
            logger.info("- %s" % t)
            logger.info(" commit ID : %s" % v['id'])
            logger.info(" processed? : %s" % v['processed'])
        return

    # Create the repo handler.
    repo = repo_class[repo_type]()

    # Update the cache: get any new/moved tags.
    first_tag = config_sec.get('first_tag', None)
    tag_pattern = config_sec.get('tag_pattern', None)
    tag_re = None
    if tag_pattern:
        tag_re = re.compile(tag_pattern)

    reached_first_tag = not bool(first_tag)
    previous_tags = cache['tags']
    tags = repo.getTags(repo_dir)
    for t, i in tags:
        if not reached_first_tag and first_tag == t:
            reached_first_tag = True

        # Tags older than `first_tag` are dropped from the cache.
        if not reached_first_tag:
            if t in previous_tags:
                logger.info("Removing tag '%s'." % t)
                del previous_tags[t]
            continue

        if not tag_re or tag_re.search(t):
            if t not in previous_tags:
                logger.info("Adding tag '%s'." % t)
                previous_tags[t] = {'id': i, 'processed': False}
            elif previous_tags[t]['id'] != i:
                # The tag moved to a different revision: re-process it.
                logger.info("Moving tag '%s'." % t)
                previous_tags[t] = {'id': i, 'processed': False}

    logger.info("Updating cache file '%s'." % cache_file)
    with open(cache_file, 'w') as fp:
        json.dump(cache, fp)

    if res.scan_only:
        return

    # Clone or update/checkout the repository in the temp directory.
    clone_dir = os.path.join(tmp_dir, 'clone')
    if not os.path.exists(clone_dir):
        logger.info("Cloning '%s' into: %s" % (repo_dir, clone_dir))
        repo.clone(repo_dir, clone_dir)
    else:
        logger.info("Pulling changes from '%s'." % repo_dir)
        repo.pull(clone_dir, repo_dir)
        repo.update(clone_dir, None)

    # Process tags!
    use_shell = config_sec.get('use_shell') in ['1', 'yes', 'true']
    for tn, ti in cache['tags'].items():
        if ti['processed']:
            logger.info("Skipping '%s'." % tn)
            continue

        logger.info("Updating repo to '%s'." % tn)
        repo.update(clone_dir, ti['id'])

        # The command template may reference the revision, clone dir and tag.
        command = config_sec['command'] % {
            'rev_id': ti['id'],
            'root_dir': clone_dir,
            'tag': tn}
        logger.info("Running: %s" % command)
        if use_shell:
            subprocess.check_call(command, shell=True)
        else:
            # BUG FIX: without a shell, a command string must be split into
            # an argv list; passing the raw string fails for any command
            # that has arguments.
            subprocess.check_call(shlex.split(command))

        # Persist progress after every tag so an interrupted run resumes.
        ti['processed'] = True
        with open(cache_file, 'w') as fp:
            json.dump(cache, fp)
|
|
269
|
|
270
|
|
if __name__ == '__main__':
    # Configure bare-message logging for CLI output, then run the tool.
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    main()
|
|
274
|