summaryrefslogtreecommitdiffstats
path: root/meta/classes/distrodata.bbclass
blob: 945ff5344c831e0625c4d132ea6a017fa817091e (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
addhandler distro_eventhandler
python distro_eventhandler() {
    # Bitbake event handler; 'e' is the event object injected by the event
    # machinery.  On BuildStarted, (re)create the distrodata CSV log via
    # oe.distro_check and write its header row.  The lock file matches the
    # one taken by the per-recipe writers in do_distrodata.
    # NOTE(review): "Upsteam" in the header is a typo, but it is runtime
    # output that downstream report tooling may match — left unchanged.

    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "distrodata.csv")
        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package,Description,Owner,License,VerMatch,Version,Upsteam,Reason,Recipe Status,Distro 1,Distro 2,Distro 3\n")
        f.close()
        bb.utils.unlockfile(lf)

    return
}

addtask distrodata_np
do_distrodata_np[nostamp] = "1"
python do_distrodata_np() {
        # Non-persistent ("np") variant of do_distrodata: collects the same
        # per-recipe tracking data (current/upstream version, maintainer,
        # licence, recipe status, distro packaging comparison) but only
        # reports it through bb.note() instead of appending to the CSV log.
        localdata = bb.data.createCopy(d)
        pn = d.getVar("PN", True)
        bb.note("Package Name: %s" % pn)

        import oe.distro_check as dist_check
        tmpdir = d.getVar('TMPDIR', True)
        distro_check_dir = os.path.join(tmpdir, "distro_check")
        datetime = localdata.getVar('DATETIME', True)
        dist_check.update_distro_data(distro_check_dir, datetime)

        # For variant recipes (-native, nativesdk-, -cross, -crosssdk,
        # -initial) re-evaluate the metadata with the base recipe's "pn-"
        # override so the tracking fields of the plain recipe are used.
        if pn.find("-native") != -1:
            pnstripped = pn.split("-native")
            bb.note("Native Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("nativesdk-") != -1:
            pnstripped = pn.replace("nativesdk-", "")
            bb.note("Native Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("-cross") != -1:
            pnstripped = pn.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("-crosssdk") != -1:
            pnstripped = pn.split("-crosssdk")
            bb.note("cross Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("-initial") != -1:
            pnstripped = pn.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        # Generate package information from the .bb file.  Commas are
        # stripped from free-form fields because the output is comma-joined.
        pname = localdata.getVar('PN', True)
        pcurver = localdata.getVar('PV', True)
        pdesc = localdata.getVar('DESCRIPTION', True)
        if pdesc is not None:
                pdesc = pdesc.replace(',','')
                pdesc = pdesc.replace('\n','')

        plicense = localdata.getVar('LICENSE', True).replace(',','_')

        rstatus = localdata.getVar('RECIPE_COLOR', True)
        if rstatus is not None:
                rstatus = rstatus.replace(',','')

        pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
        if pcurver == pupver:
                vermatch = "1"
        else:
                vermatch = "0"

        noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
        if noupdate_reason is not None:
                noupdate_reason = noupdate_reason.replace(',','')

        maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
        result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

        bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
                  (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
        line = pn
        for i in result:
            line = line + "," + i
        bb.note("%s\n" % line)
}

addtask distrodata
do_distrodata[nostamp] = "1"
python do_distrodata() {
        # Collect per-recipe tracking data (current/upstream version,
        # maintainer, licence, recipe status, distro packaging comparison)
        # and append it as one CSV row to ${LOG_DIR}/distrodata.csv.
        # Access to the shared log is serialised with a lock file because
        # many recipes may run this task concurrently.
        logpath = d.getVar('LOG_DIR', True)
        bb.utils.mkdirhier(logpath)
        logfile = os.path.join(logpath, "distrodata.csv")

        import oe.distro_check as dist_check
        localdata = bb.data.createCopy(d)
        tmpdir = d.getVar('TMPDIR', True)
        distro_check_dir = os.path.join(tmpdir, "distro_check")
        datetime = localdata.getVar('DATETIME', True)
        dist_check.update_distro_data(distro_check_dir, datetime)

        pn = d.getVar("PN", True)
        bb.note("Package Name: %s" % pn)

        # For variant recipes (-native, -cross, -initial) re-evaluate the
        # metadata with the base recipe's "pn-" override so the tracking
        # fields of the plain recipe are used.
        if pn.find("-native") != -1:
            pnstripped = pn.split("-native")
            bb.note("Native Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("-cross") != -1:
            pnstripped = pn.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pn.find("-initial") != -1:
            pnstripped = pn.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        # Generate package information from the .bb file.  Commas are
        # stripped from free-form fields because the log is CSV.
        pname = localdata.getVar('PN', True)
        pcurver = localdata.getVar('PV', True)
        pdesc = localdata.getVar('DESCRIPTION', True)
        if pdesc is not None:
                pdesc = pdesc.replace(',','')
                pdesc = pdesc.replace('\n','')

        plicense = localdata.getVar('LICENSE', True).replace(',','_')

        rstatus = localdata.getVar('RECIPE_COLOR', True)
        if rstatus is not None:
                rstatus = rstatus.replace(',','')

        pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
        if pcurver == pupver:
                vermatch = "1"
        else:
                vermatch = "0"

        noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
        if noupdate_reason is not None:
                noupdate_reason = noupdate_reason.replace(',','')

        maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
        # do the comparison
        result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("%s,%s,%s,%s,%s,%s,%s,%s,%s" % \
                  (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
        line = ""
        for i in result:
            line = line + "," + i
        f.write(line + "\n")
        f.close()
        bb.utils.unlockfile(lf)
}

addtask distrodataall after do_distrodata
do_distrodataall[recrdeptask] = "do_distrodataall do_distrodata"
do_distrodataall[nostamp] = "1"
do_distrodataall() {
        # No-op shell task: exists only so the recrdeptask flag above pulls
        # in do_distrodata for every recipe in the dependency tree.
        :
}

addhandler checkpkg_eventhandler
python checkpkg_eventhandler() {
    # Bitbake event handler; 'e' is the event object injected by the event
    # machinery.  On BuildStarted, (re)create the checkpkg CSV log and
    # write its tab-separated header row.  The lock file matches the one
    # taken by the per-recipe writers in do_checkpkg.
    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "checkpkg.csv")

        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package\tVersion\tUpver\tLicense\tSection\tHome\tRelease\tDepends\tBugTracker\tPE\tDescription\tStatus\tTracking\tURI\tMAINTAINER\n")
        f.close()
        bb.utils.unlockfile(lf)
    return
}

addtask checkpkg
do_checkpkg[nostamp] = "1"
python do_checkpkg() {
        # Probe the recipe's upstream location (http/https/ftp/git/svn/cvs)
        # for a release newer than the current PV and append one
        # tab-separated result row to ${LOG_DIR}/checkpkg.csv.  The status
        # column is MATCH/UPDATE on success or an "Errxxxx" string on
        # failure.  This is Python-2-era code ("except X, e", "print").
        localdata = bb.data.createCopy(d)
        import sys
        import re
        import tempfile
        import subprocess

        """
        sanity check to ensure same name and type. Match as many patterns as possible
        such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*[\-_]"        # match most patterns which uses "-" as separator to version digits
        prefix2 = "[a-zA-Z]+"                        # a loose pattern such as for unzip552.tar.gz
        prefix3 = "[0-9a-zA-Z]+"                        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
        suffix = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm)"
        suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "src.rpm")

        sinterstr = "(?P<name>%s?)(?P<ver>.*)" % prefix
        sdirstr = "(?P<name>%s)(?P<ver>.*)\.(?P<type>%s$)" % (prefix, suffix)

        # Split an intermediate (version-directory) name into (name, ver, "").
        # Returns None when the name does not match the patterns above.
        def parse_inter(s):
                m = re.search(sinterstr, s)
                if not m:
                        return None
                else:
                        return (m.group('name'), m.group('ver'), "")

        # Split a tarball file name into (name, ver, type) where type is the
        # archive suffix.  Returns None when it does not match.
        def parse_dir(s):
                m = re.search(sdirstr, s)
                if not m:
                        return None
                else:
                        return (m.group('name'), m.group('ver'), m.group('type'))

        """
        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
        purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
        for simplicity as it's somehow difficult to get from various upstream format
        """
        def __vercmp(old, new):
                (on, ov, ot) = old
                (en, ev, et) = new
                # Different package names, or an unsupported archive suffix,
                # never compare as newer.
                if on != en or (et and et not in suffixtuple):
                        return 0
                ov = re.search("[\d|\.]+[^a-zA-Z]+", ov).group()
                ev = re.search("[\d|\.]+[^a-zA-Z]+", ev).group()
                return bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))

        """
        wrapper for fetch upstream directory info
                'url'        - upstream link customized by regular expression
                'd'        - database
                'tmpf'        - tmpfile for fetcher output
        We don't want to exit whole build due to one recipe error. So handle all exceptions 
        gracefully w/o leaking to outer. 
        """
        def internal_fetch_wget(url, d, tmpf):
                status = "ErrFetchUnknown"
                """
                Clear internal url cache as it's a temporary check. Not doing so will have 
                bitbake check url multiple times when looping through a single url
                """
                fn = d.getVar('FILE', True)
                bb.fetch2.urldata_cache[fn] = {}

                """
                To avoid impacting bitbake build engine, this trick is required for reusing bitbake
                interfaces. bb.fetch.go() is not appliable as it checks downloaded content in ${DL_DIR}
                while we don't want to pollute that place. So bb.fetch2.checkstatus() is borrowed here
                which is designed for check purpose but we override check command for our own purpose
                """
                ld = bb.data.createCopy(d)
                # NOTE(review): this sets CHECKCOMMAND_wget on the global
                # datastore 'd' rather than the copy 'ld' that the fetcher
                # actually uses — looks unintended; confirm before relying on it.
                d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
                                        % tmpf.name)
                bb.data.update_data(ld)

                try:
                        fetcher = bb.fetch2.Fetch([url], ld)
                        fetcher.checkstatus()
                        status = "SUCC"
                except bb.fetch2.BBFetchException, e:
                        status = "ErrFetch"

                return status

        """
        Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz", 
                'url'        - upstream link customized by regular expression
                'd'        - database
                'curver' - current version
        Return new version if success, or else error in "Errxxxx" style
        """
        def check_new_dir(url, curver, d):
                pn = d.getVar('PN', True)
                # delete=False so the fetched page survives f.close() and can
                # be copied into LOG_DIR on parse errors below.
                f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
                status = internal_fetch_wget(url, d, f)
                fhtml = f.read()
                if status == "SUCC" and len(fhtml):
                        newver = parse_inter(curver)

                        """
                        match "*4.1/">*4.1/ where '*' matches chars
                        N.B. add package name, only match for digits
                        """
                        m = re.search("^%s" % prefix, curver)
                        if m:
                                s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
                        else:
                                s = "(\d+[\.\-_])+\d+/?"

                        searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
                        reg = re.compile(searchstr)

                        valid = 0
                        for line in fhtml.split("\n"):
                                if line.find(curver) >= 0:
                                        valid = 1
                                m = reg.search(line)
                                if m:
                                        ver = m.group().split("\"")[1]
                                        ver = ver.strip("/")
                                        ver = parse_inter(ver)
                                        if ver and __vercmp(newver, ver) < 0:
                                                newver = ver

                        """Expect a match for curver in directory list, or else it indicates unknown format"""
                        if not valid:
                                status = "ErrParseInterDir"
                        else:
                                """rejoin the path name"""
                                status = newver[0] + newver[1]
                elif not len(fhtml):
                        status = "ErrHostNoDir"

                f.close()
                # Keep the fetched page in LOG_DIR for post-mortem on parse
                # errors (but not when the host simply had no directory).
                if status != "ErrHostNoDir" and re.match("Err", status):
                        logpath = d.getVar('LOG_DIR', True)
                        subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
                os.unlink(f.name)
                return status

        """
        Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz", 
                'url'        - upstream link customized by regular expression
                'd'        - database
                'curname' - current package name
        Return new version if success, or else error in "Errxxxx" style
        """
        def check_new_version(url, curname, d):
                """possible to have no version in pkg name, such as spectrum-fw"""
                if not re.search("\d+", curname):
                        return pcurver
                pn = d.getVar('PN', True)
                f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
                status = internal_fetch_wget(url, d, f)
                fhtml = f.read()

                if status == "SUCC" and len(fhtml):
                        newver = parse_dir(curname)

                        """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
                        pn1 = re.search("^%s" % prefix, curname).group()

                        s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
                        searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
                        reg = re.compile(searchstr)

                        valid = 0
                        for line in fhtml.split("\n"):
                                m = reg.search(line)
                                if m:
                                        valid = 1
                                        ver = m.group().split("\"")[1].split("/")[-1]
                                        if ver == "download":
                                                ver = m.group().split("\"")[1].split("/")[-2]
                                        ver = parse_dir(ver)
                                        if ver and __vercmp(newver, ver) < 0:
                                                newver = ver

                        """Expect a match for curver in directory list, or else it indicates unknown format"""
                        if not valid:
                                status = "ErrParseDir"
                        else:
                                """newver still contains a full package name string"""
                                status = re.search("(\d+[\.\-_])*(\d+[0-9a-zA-Z]*)", newver[1]).group()
                                if "_" in status:
                                        status = re.sub("_",".",status)
                                elif "-" in status:
                                        status = re.sub("-",".",status)
                elif not len(fhtml):
                        status = "ErrHostNoDir"

                f.close()
                """if host hasn't directory information, no need to save tmp file"""
                if status != "ErrHostNoDir" and re.match("Err", status):
                        logpath = d.getVar('LOG_DIR', True)
                        subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
                os.unlink(f.name)
                return status

        """first check whether a uri is provided"""
        src_uri = d.getVar('SRC_URI', True)
        if not src_uri:
                return

        """initialize log files."""
        logpath = d.getVar('LOG_DIR', True)
        bb.utils.mkdirhier(logpath)
        logfile = os.path.join(logpath, "checkpkg.csv")

        """generate package information from .bb file"""
        pname = d.getVar('PN', True)

        # For variant recipes, re-evaluate localdata with the base recipe's
        # "pn-" override so its tracking fields are used.
        if pname.find("-native") != -1:
            pnstripped = pname.split("-native")
            bb.note("Native Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pname.find("-cross") != -1:
            pnstripped = pname.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        if pname.find("-initial") != -1:
            pnstripped = pname.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)

        pdesc = localdata.getVar('DESCRIPTION', True)
        pgrp = localdata.getVar('SECTION', True)
        pversion = localdata.getVar('PV', True)
        plicense = localdata.getVar('LICENSE', True)
        psection = localdata.getVar('SECTION', True)
        phome = localdata.getVar('HOMEPAGE', True)
        prelease = localdata.getVar('PR', True)
        pdepends = localdata.getVar('DEPENDS', True)
        pbugtracker = localdata.getVar('BUGTRACKER', True)
        ppe = localdata.getVar('PE', True)
        psrcuri = localdata.getVar('SRC_URI', True)
        maintainer = localdata.getVar('RECIPE_MAINTAINER', True)

        # Pick the first network URI out of SRC_URI; fall back to "file".
        found = 0
        for uri in src_uri.split():
                m = re.compile('(?P<type>[^:]*)').match(uri)
                if not m:
                        raise MalformedUrl(uri)
                elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
                        found = 1
                        pproto = m.group('type')
                        break
        if not found:
                pproto = "file"
        pupver = "N/A"
        pstatus = "ErrUnknown"

        # 'uri' here is the entry the loop above stopped at (or the last
        # SRC_URI entry when nothing matched).
        (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
        if type in ['http', 'https', 'ftp']:
                pcurver = d.getVar('PV', True)
        else:
                pcurver = d.getVar("SRCREV", True)

        if type in ['http', 'https', 'ftp']:
                newver = pcurver
                altpath = path
                dirver = "-"
                curname = "-"

                """
                match version number amid the path, such as "5.7" in:
                        http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz        
                N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
                """
                m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
                if m:
                        altpath = path.split(m.group())[0]
                        dirver = m.group().strip("/")

                        """use new path and remove param. for wget only param is md5sum"""
                        alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])

                        newver = check_new_dir(alturi, dirver, d)
                        altpath = path
                        if not re.match("Err", newver) and dirver != newver:
                                altpath = altpath.replace(dirver, newver, True)

                """Now try to acquire all remote files in current directory"""
                if not re.match("Err", newver):
                        curname = altpath.split("/")[-1]

                        """get remote name by skipping pacakge name"""
                        m = re.search(r"/.*/", altpath)
                        if not m:
                                altpath = "/"
                        else:
                                altpath = m.group()

                        alturi = bb.encodeurl([type, host, altpath, user, pswd, {}])
                        newver = check_new_version(alturi, curname, d)
                        # NOTE(review): if check_new_version keeps returning
                        # ErrHostNoDir, alturi may stop shrinking without ever
                        # equalling "/download" — possible endless loop; confirm.
                        while(newver == "ErrHostNoDir"):
                                if alturi == "/download":
                                        break
                                else:
                                        alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
                                        newver = check_new_version(alturi, curname, d)
                        if not re.match("Err", newver):
                                pupver = newver
                                if pupver != pcurver:
                                        pstatus = "UPDATE"
                                else:
                                        pstatus = "MATCH"

                if re.match("Err", newver):
                        pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
        elif type == 'git':
                if user:
                        gituser = user + '@'
                else:
                        gituser = ""

                if 'protocol' in parm:
                        gitproto = parm['protocol']
                else:
                        gitproto = "git"
                # List remote tags / HEAD without cloning the repository.
                gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
                gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
                tmp = os.popen(gitcmd).read()
                tmp2 = os.popen(gitcmd2).read()
                #This is for those repo have tag like: refs/tags/1.2.2
                if tmp:
                        tmpline = tmp.split("\n")
                        verflag = 0
                        for line in tmpline:
                                if len(line)==0:
                                        break;
                                puptag = line.split("/")[-1]
                                puptag = re.search("[0-9][0-9|\.|_]+[0-9]", puptag)
                                if puptag == None:
                                        continue;
                                puptag = puptag.group()
                                puptag = re.sub("_",".",puptag)
                                plocaltag = pversion.split("+")[0]
                                if "git" in plocaltag:
                                        plocaltag = plocaltag.split("-")[0]
                                result = bb.utils.vercmp(("0", puptag, ""), ("0", plocaltag, ""))
                                if result > 0:
                                        verflag = 1
                                        pstatus = "UPDATE"
                                        pupver = puptag
                                elif verflag == 0 :
                                        pupver = plocaltag
                                        pstatus = "MATCH"
                #This is for those no tag repo
                elif tmp2:
                        pupver = tmp2.split("\t")[0]
                        if pupver in pversion:
                                pstatus = "MATCH"
                        else:
                                pstatus = "UPDATE"
                else:
                        pstatus = "ErrGitAccess"
        elif type == 'svn':
                options = []
                if user:
                        options.append("--username %s" % user)
                if pswd:
                        options.append("--password %s" % pswd)
                svnproto = 'svn'
                if 'proto' in parm:
                        svnproto = parm['proto']
                if 'rev' in parm:
                        pcurver = parm['rev']

                svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
                print svncmd
                svninfo = os.popen(svncmd).read()
                for line in svninfo.split("\n"):
                        if re.search("^Last Changed Rev:", line):
                                pupver = line.split(" ")[-1]
                                if pupver in pversion:
                                        pstatus = "MATCH"
                                else:
                                        pstatus = "UPDATE"

                if re.match("Err", pstatus):
                        pstatus = "ErrSvnAccess"
        elif type == 'cvs':
                pupver = "HEAD"
                pstatus = "UPDATE"
        elif type == 'file':
                """local file is always up-to-date"""
                pupver = pcurver
                pstatus = "MATCH"
        else:
                pstatus = "ErrUnsupportedProto"

        if re.match("Err", pstatus):
                pstatus += ":%s%s" % (host, path)

        """Read from manual distro tracking fields as alternative"""
        pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True)
        if not pmver:
                pmver = "N/A"
                pmstatus = "ErrNoRecipeData"
        else:
                if pmver == pcurver:
                        pmstatus = "MATCH"
                else:
                        pmstatus = "UPDATE"

        # Tabs are stripped from free-form fields because the log is
        # tab-separated; only the first SRC_URI entry is reported.
        psrcuri = psrcuri.split()[0]
        pdepends = "".join(pdepends.split("\t"))
        pdesc = "".join(pdesc.split("\t"))
        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % \
                  (pname,pversion,pupver,plicense,psection, phome,prelease, pdepends,pbugtracker,ppe,pdesc,pstatus,pmver,psrcuri,maintainer))
        f.close()
        bb.utils.unlockfile(lf)
}

addtask checkpkgall after do_checkpkg
do_checkpkgall[recrdeptask] = "do_checkpkgall do_checkpkg"
do_checkpkgall[nostamp] = "1"
do_checkpkgall() {
        # No-op shell task: exists only so the recrdeptask flag above pulls
        # in do_checkpkg for every recipe in the dependency tree.
        :
}

addhandler distro_check_eventhandler
python distro_check_eventhandler() {
    # Bitbake event handler; 'e' is the event object injected by the event
    # machinery.  On BuildStarted, (re)create the distrocheck CSV log used
    # by do_distro_check (no header row is written for this one).
    if bb.event.getName(e) == "BuildStarted":
        """initialize log files."""
        import oe.distro_check as dc
        result_file = dc.create_log_file(e.data, "distrocheck.csv")
    return
}

addtask distro_check
do_distro_check[nostamp] = "1"
python do_distro_check() {
    """checks if the package is present in other public Linux distros"""
    import oe.distro_check as dc
    # Variant recipes (native/cross/sdk/crosssdk/nativesdk) shadow a base
    # recipe; only the base recipe is meaningful for a distro comparison,
    # so skip everything else.
    if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk',d):
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    tmpdir = d.getVar('TMPDIR', True)
    distro_check_dir = os.path.join(tmpdir, "distro_check")
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    result_file = os.path.join(logpath, "distrocheck.csv")
    datetime = localdata.getVar('DATETIME', True)
    # Refresh the cached distro package lists for this build's DATETIME.
    dc.update_distro_data(distro_check_dir, datetime)

    # do the comparison
    result = dc.compare_in_distro_packages_list(distro_check_dir, d)

    # save the results
    dc.save_distro_check_result(result, datetime, result_file, d)
}

addtask distro_checkall after do_distro_check
do_distro_checkall[recrdeptask] = "do_distro_checkall do_distro_check"
do_distro_checkall[nostamp] = "1"
do_distro_checkall() {
        # No-op shell task: exists only so the recrdeptask flag above pulls
        # in do_distro_check for every recipe in the dependency tree.
        :
}
#
# Check for missing license text.
# Use this task to generate the missing-license-text data for the pkg-report system,
# so we can find recipes whose license text does not exist in the common-licenses directory.
#
addhandler checklicense_eventhandler
python checklicense_eventhandler() {
    """Initialize the missing-license log file when the build starts."""
    if bb.event.getName(e) == "BuildStarted":
        import oe.distro_check as dc
        logfile = dc.create_log_file(e.data, "missinglicense.csv")
        # Serialise writers with a lock file; do_checklicense tasks append
        # their rows below this header.
        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        f.write("Package\tLicense\tMissingLicense\n")
        f.close()
        bb.utils.unlockfile(lf)
    return
}

addtask checklicense
do_checklicense[nostamp] = "1"
python do_checklicense() {
    """Log licenses from LICENSE that have no text in COMMON_LICENSE_DIR."""
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    pn = d.getVar('PN', True)
    logfile = os.path.join(logpath, "missinglicense.csv")
    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
    license_types = d.getVar('LICENSE', True)
    # Strip the operators/punctuation out of the LICENSE expression and
    # split it into individual license names.
    licenses = (license_types.replace('+', '').replace('|', '&')
                .replace('(', '').replace(')', '').replace(';', '')
                .replace(',', '').replace(" ", "").split("&"))
    # Collect all missing licenses first so the lock is taken and the log
    # file opened only once per recipe, not once per missing entry.
    missing = [lic for lic in licenses
               if not os.path.isfile(os.path.join(generic_directory, lic))]
    if missing:
        lf = bb.utils.lockfile("%s.lock" % logfile)
        f = open(logfile, "a")
        for license_type in missing:
            f.write("%s\t%s\t%s\n" % (pn, license_types, license_type))
        f.close()
        bb.utils.unlockfile(lf)
    return
}

# Umbrella task: running do_checklicenseall pulls in do_checklicense for
# this recipe and (via recrdeptask) for all recursive dependencies.
# The task body itself is deliberately a no-op.
addtask checklicenseall after do_checklicense
do_checklicenseall[recrdeptask] = "do_checklicenseall do_checklicense"
do_checklicenseall[nostamp] = "1"
do_checklicenseall() {
        :
}
# NOTE(review): everything below this point is Python 2 code from the
# combo-layer tool (action_init/action_pull/action_update/main, etc.)
# that has been pasted into this bbclass with all indentation and line
# structure destroyed (note e.g. "except subprocess.CalledProcessError,e"
# and bare "print" statements). Because Python's structure is carried by
# indentation, this text cannot be safely reformatted or executed as it
# stands; it should be removed from this file and restored from the
# upstream combo-layer script rather than edited here. The original
# bytes are preserved unchanged below.
cmd, os.getcwd() if destdir is None else destdir)) if not out: out = os.tmpfile() err = out else: err = os.tmpfile() try: subprocess.check_call(cmd, stdout=out, stderr=err, cwd=destdir, shell=isinstance(cmd, str)) except subprocess.CalledProcessError,e: err.seek(0) if printerr: logger.error("%s" % err.read()) raise e err.seek(0) output = err.read() logger.debug("output: %s" % output ) return output def action_init(conf, args): """ Clone component repositories Check git is initialised; if not, copy initial data from component repos """ for name in conf.repos: ldir = conf.repos[name]['local_repo_dir'] if not os.path.exists(ldir): logger.info("cloning %s to %s" %(conf.repos[name]['src_uri'], ldir)) subprocess.check_call("git clone %s %s" % (conf.repos[name]['src_uri'], ldir), shell=True) if not os.path.exists(".git"): runcmd("git init") if conf.history: # Need a common ref for all trees. runcmd('git commit -m "initial empty commit" --allow-empty') startrev = runcmd('git rev-parse master').strip() for name in conf.repos: repo = conf.repos[name] ldir = repo['local_repo_dir'] branch = repo.get('branch', "master") lastrev = repo.get('last_revision', None) if lastrev and lastrev != "HEAD": initialrev = lastrev if branch: if not check_rev_branch(name, ldir, lastrev, branch): sys.exit(1) logger.info("Copying data from %s at specified revision %s..." % (name, lastrev)) else: lastrev = None initialrev = branch logger.info("Copying data from %s..." % name) # Sanity check initialrev and turn it into hash (required for copying history, # because resolving a name ref only works in the component repo). rev = runcmd('git rev-parse %s' % initialrev, ldir).strip() if rev != initialrev: try: refs = runcmd('git show-ref -s %s' % initialrev, ldir).split('\n') if len(set(refs)) > 1: # Happens for example when configured to track # "master" and there is a refs/heads/master. The # traditional behavior from "git archive" (preserved # here) it to choose the first one. 
This might not be # intended, so at least warn about it. logger.warn("%s: initial revision '%s' not unique, picking result of rev-parse = %s" % (name, initialrev, refs[0])) initialrev = rev except: # show-ref fails for hashes. Skip the sanity warning in that case. pass initialrev = rev dest_dir = repo['dest_dir'] if dest_dir != ".": extract_dir = os.path.join(os.getcwd(), dest_dir) if not os.path.exists(extract_dir): os.makedirs(extract_dir) else: extract_dir = os.getcwd() file_filter = repo.get('file_filter', "") exclude_patterns = repo.get('file_exclude', '').split() def copy_selected_files(initialrev, extract_dir, file_filter, exclude_patterns, ldir, subdir=""): # When working inside a filtered branch which had the # files already moved, we need to prepend the # subdirectory to all filters, otherwise they would # not match. if subdir == '.': subdir = '' elif subdir: subdir = os.path.normpath(subdir) file_filter = ' '.join([subdir + '/' + x for x in file_filter.split()]) exclude_patterns = [subdir + '/' + x for x in exclude_patterns] # To handle both cases, we cd into the target # directory and optionally tell tar to strip the path # prefix when the files were already moved. subdir_components = len(subdir.split(os.path.sep)) if subdir else 0 strip=('--strip-components=%d' % subdir_components) if subdir else '' # TODO: file_filter wild cards do not work (and haven't worked before either), because # a) GNU tar requires a --wildcards parameter before turning on wild card matching. # b) The semantic is not as intendend (src/*.c also matches src/foo/bar.c, # in contrast to the other use of file_filter as parameter of "git archive" # where it only matches .c files directly in src). files = runcmd("git archive %s %s | tar -x -v %s -C %s %s" % (initialrev, subdir, strip, extract_dir, file_filter), ldir) if exclude_patterns: # Implement file removal by letting tar create the # file and then deleting it in the file system # again. 
Uses the list of files created by tar (easier # than walking the tree). for file in files.split('\n'): for pattern in exclude_patterns: if fnmatch.fnmatch(file, pattern): os.unlink(os.path.join(*([extract_dir] + ['..'] * subdir_components + [file]))) break if not conf.history: copy_selected_files(initialrev, extract_dir, file_filter, exclude_patterns, ldir) else: # First fetch remote history into local repository. # We need a ref for that, so ensure that there is one. refname = "combo-layer-init-%s" % name runcmd("git branch -f %s %s" % (refname, initialrev), ldir) runcmd("git fetch %s %s" % (ldir, refname)) runcmd("git branch -D %s" % refname, ldir) # Make that the head revision. runcmd("git checkout -b %s %s" % (name, initialrev)) # Optional: cut the history by replacing the given # start point(s) with commits providing the same # content (aka tree), but with commit information that # makes it clear that this is an artifically created # commit and nothing the original authors had anything # to do with. since_rev = repo.get('since_revision', '') if since_rev: committer = runcmd('git var GIT_AUTHOR_IDENT').strip() # Same time stamp, no name. author = re.sub('.* (\d+ [+-]\d+)', r'unknown <unknown> \1', committer) logger.info('author %s' % author) for rev in since_rev.split(): # Resolve in component repo... rev = runcmd('git log --oneline --no-abbrev-commit -n1 %s' % rev, ldir).split()[0] # ... and then get the tree in current # one. The commit should be in both repos with # the same tree, but better check here. tree = runcmd('git show -s --pretty=format:%%T %s' % rev).strip() with tempfile.NamedTemporaryFile() as editor: editor.write('''cat >$1 <<EOF tree %s author %s committer %s %s: squashed import of component This commit copies the entire set of files as found in %s %s For more information about previous commits, see the upstream repository. Commit created by combo-layer. 
EOF ''' % (tree, author, committer, name, name, since_rev)) editor.flush() os.environ['GIT_EDITOR'] = 'sh %s' % editor.name runcmd('git replace --edit %s' % rev) # Optional: rewrite history to change commit messages or to move files. if 'hook' in repo or dest_dir != ".": filter_branch = ['git', 'filter-branch', '--force'] with tempfile.NamedTemporaryFile() as hookwrapper: if 'hook' in repo: # Create a shell script wrapper around the original hook that # can be used by git filter-branch. Hook may or may not have # an absolute path. hook = repo['hook'] hook = os.path.join(os.path.dirname(conf.conffile), '..', hook) # The wrappers turns the commit message # from stdin into a fake patch header. # This is good enough for changing Subject # and commit msg body with normal # combo-layer hooks. hookwrapper.write('''set -e tmpname=$(mktemp) trap "rm $tmpname" EXIT echo -n 'Subject: [PATCH] ' >>$tmpname cat >>$tmpname if ! [ $(tail -c 1 $tmpname | od -A n -t x1) == '0a' ]; then echo >>$tmpname fi echo '---' >>$tmpname %s $tmpname $GIT_COMMIT %s tail -c +18 $tmpname | head -c -4 ''' % (hook, name)) hookwrapper.flush() filter_branch.extend(['--msg-filter', 'bash %s' % hookwrapper.name]) if dest_dir != ".": parent = os.path.dirname(dest_dir) if not parent: parent = '.' # May run outside of the current directory, so do not assume that .git exists. filter_branch.extend(['--tree-filter', 'mkdir -p .git/tmptree && find . -mindepth 1 -maxdepth 1 ! -name .git -print0 | xargs -0 -I SOURCE mv SOURCE .git/tmptree && mkdir -p %s && mv .git/tmptree %s' % (parent, dest_dir)]) filter_branch.append('HEAD') runcmd(filter_branch) runcmd('git update-ref -d refs/original/refs/heads/%s' % name) repo['rewritten_revision'] = runcmd('git rev-parse HEAD').strip() repo['stripped_revision'] = repo['rewritten_revision'] # Optional filter files: remove everything and re-populate using the normal filtering code. # Override any potential .gitignore. 
if file_filter or exclude_patterns: runcmd('git rm -rf .') if not os.path.exists(extract_dir): os.makedirs(extract_dir) copy_selected_files('HEAD', extract_dir, file_filter, exclude_patterns, '.', subdir=dest_dir) runcmd('git add --all --force .') if runcmd('git status --porcelain'): # Something to commit. runcmd(['git', 'commit', '-m', '''%s: select file subset Files from the component repository were chosen based on the following filters: file_filter = %s file_exclude = %s''' % (name, file_filter or '<empty>', repo.get('file_exclude', '<empty>'))]) repo['stripped_revision'] = runcmd('git rev-parse HEAD').strip() if not lastrev: lastrev = runcmd('git rev-parse %s' % initialrev, ldir).strip() conf.update(name, "last_revision", lastrev, initmode=True) if not conf.history: runcmd("git add .") else: # Create Octopus merge commit according to http://stackoverflow.com/questions/10874149/git-octopus-merge-with-unrelated-repositoies runcmd('git checkout master') merge = ['git', 'merge', '--no-commit'] for name in conf.repos: repo = conf.repos[name] # Use branch created earlier. merge.append(name) # Root all commits which have no parent in the common # ancestor in the new repository. for start in runcmd('git log --pretty=format:%%H --max-parents=0 %s' % name).split('\n'): runcmd('git replace --graft %s %s' % (start, startrev)) try: runcmd(merge) except Exception, error: logger.info('''Merging component repository history failed, perhaps because of merge conflicts. It may be possible to commit anyway after resolving these conflicts. %s''' % error) # Create MERGE_HEAD and MERGE_MSG. "git merge" itself # does not create MERGE_HEAD in case of a (harmless) failure, # and we want certain auto-generated information in the # commit message for future reference and/or automation. 
with open('.git/MERGE_HEAD', 'w') as head: with open('.git/MERGE_MSG', 'w') as msg: msg.write('repo: initial import of components\n\n') # head.write('%s\n' % startrev) for name in conf.repos: repo = conf.repos[name] # <upstream ref> <rewritten ref> <rewritten + files removed> msg.write('combo-layer-%s: %s %s %s\n' % (name, repo['last_revision'], repo['rewritten_revision'], repo['stripped_revision'])) rev = runcmd('git rev-parse %s' % name).strip() head.write('%s\n' % rev) if conf.localconffile: localadded = True try: runcmd("git rm --cached %s" % conf.localconffile, printerr=False) except subprocess.CalledProcessError: localadded = False if localadded: localrelpath = os.path.relpath(conf.localconffile) runcmd("grep -q %s .gitignore || echo %s >> .gitignore" % (localrelpath, localrelpath)) runcmd("git add .gitignore") logger.info("Added local configuration file %s to .gitignore", localrelpath) logger.info("Initial combo layer repository data has been created; please make any changes if desired and then use 'git commit' to make the initial commit.") else: logger.info("Repository already initialised, nothing to do.") def check_repo_clean(repodir): """ check if the repo is clean exit if repo is dirty """ output=runcmd("git status --porcelain", repodir) r = re.compile('\?\? 
patch-.*/') dirtyout = [item for item in output.splitlines() if not r.match(item)] if dirtyout: logger.error("git repo %s is dirty, please fix it first", repodir) sys.exit(1) def check_patch(patchfile): f = open(patchfile) ln = f.readline() of = None in_patch = False beyond_msg = False pre_buf = '' while ln: if not beyond_msg: if ln == '---\n': if not of: break in_patch = False beyond_msg = True elif ln.startswith('--- '): # We have a diff in the commit message in_patch = True if not of: print('WARNING: %s contains a diff in its commit message, indenting to avoid failure during apply' % patchfile) of = open(patchfile + '.tmp', 'w') of.write(pre_buf) pre_buf = '' elif in_patch and not ln[0] in '+-@ \n\r': in_patch = False if of: if in_patch: of.write(' ' + ln) else: of.write(ln) else: pre_buf += ln ln = f.readline() f.close() if of: of.close() os.rename(patchfile + '.tmp', patchfile) def drop_to_shell(workdir=None): if not sys.stdin.isatty(): print "Not a TTY so can't drop to shell for resolution, exiting." return False shell = os.environ.get('SHELL', 'bash') print('Dropping to shell "%s"\n' \ 'When you are finished, run the following to continue:\n' \ ' exit -- continue to apply the patches\n' \ ' exit 1 -- abort\n' % shell); ret = subprocess.call([shell], cwd=workdir) if ret != 0: print "Aborting" return False else: return True def check_rev_branch(component, repodir, rev, branch): try: actualbranch = runcmd("git branch --contains %s" % rev, repodir, printerr=False) except subprocess.CalledProcessError as e: if e.returncode == 129: actualbranch = "" else: raise if not actualbranch: logger.error("%s: specified revision %s is invalid!" % (component, rev)) return False branches = [] branchlist = actualbranch.split("\n") for b in branchlist: branches.append(b.strip().split(' ')[-1]) if branch not in branches: logger.error("%s: specified revision %s is not on specified branch %s!" 
% (component, rev, branch)) return False return True def get_repos(conf, repo_names): repos = [] for name in repo_names: if name.startswith('-'): break else: repos.append(name) for repo in repos: if not repo in conf.repos: logger.error("Specified component '%s' not found in configuration" % repo) sys.exit(1) if not repos: repos = [ repo for repo in conf.repos if conf.repos[repo].get("update", True) ] return repos def action_pull(conf, args): """ update the component repos only """ repos = get_repos(conf, args[1:]) # make sure all repos are clean for name in repos: check_repo_clean(conf.repos[name]['local_repo_dir']) for name in repos: repo = conf.repos[name] ldir = repo['local_repo_dir'] branch = repo.get('branch', "master") logger.info("update branch %s of component repo %s in %s ..." % (branch, name, ldir)) if not conf.hard_reset: # Try to pull only the configured branch. Beware that this may fail # when the branch is currently unknown (for example, after reconfiguring # combo-layer). In that case we need to fetch everything and try the check out # and pull again. 
try: runcmd("git checkout %s" % branch, ldir, printerr=False) except subprocess.CalledProcessError: output=runcmd("git fetch", ldir) logger.info(output) runcmd("git checkout %s" % branch, ldir) runcmd("git pull --ff-only", ldir) else: output=runcmd("git pull --ff-only", ldir) logger.info(output) else: output=runcmd("git fetch", ldir) logger.info(output) runcmd("git checkout %s" % branch, ldir) runcmd("git reset --hard FETCH_HEAD", ldir) def action_update(conf, args): """ update the component repos generate the patch list apply the generated patches """ components = [arg.split(':')[0] for arg in args[1:]] revisions = {} for arg in args[1:]: if ':' in arg: a = arg.split(':', 1) revisions[a[0]] = a[1] repos = get_repos(conf, components) # make sure combo repo is clean check_repo_clean(os.getcwd()) import uuid patch_dir = "patch-%s" % uuid.uuid4() if not os.path.exists(patch_dir): os.mkdir(patch_dir) # Step 1: update the component repos if conf.nopull: logger.info("Skipping pull (-n)") else: action_pull(conf, ['arg0'] + components) for name in repos: revision = revisions.get(name, None) repo = conf.repos[name] ldir = repo['local_repo_dir'] dest_dir = repo['dest_dir'] branch = repo.get('branch', "master") repo_patch_dir = os.path.join(os.getcwd(), patch_dir, name) # Step 2: generate the patch list and store to patch dir logger.info("Generating patches from %s..." 
% name) top_revision = revision or branch if not check_rev_branch(name, ldir, top_revision, branch): sys.exit(1) if dest_dir != ".": prefix = "--src-prefix=a/%s/ --dst-prefix=b/%s/" % (dest_dir, dest_dir) else: prefix = "" if repo['last_revision'] == "": logger.info("Warning: last_revision of component %s is not set, starting from the first commit" % name) patch_cmd_range = "--root %s" % top_revision rev_cmd_range = top_revision else: if not check_rev_branch(name, ldir, repo['last_revision'], branch): sys.exit(1) patch_cmd_range = "%s..%s" % (repo['last_revision'], top_revision) rev_cmd_range = patch_cmd_range file_filter = repo.get('file_filter',".") # Filter out unwanted files exclude = repo.get('file_exclude', '') if exclude: for path in exclude.split(): p = "%s/%s" % (dest_dir, path) if dest_dir != '.' else path file_filter += " ':!%s'" % p patch_cmd = "git format-patch -N %s --output-directory %s %s -- %s" % \ (prefix,repo_patch_dir, patch_cmd_range, file_filter) output = runcmd(patch_cmd, ldir) logger.debug("generated patch set:\n%s" % output) patchlist = output.splitlines() rev_cmd = "git rev-list --no-merges %s -- %s" % (rev_cmd_range, file_filter) revlist = runcmd(rev_cmd, ldir).splitlines() # Step 3: Call repo specific hook to adjust patch if 'hook' in repo: # hook parameter is: ./hook patchpath revision reponame count=len(revlist)-1 for patch in patchlist: runcmd("%s %s %s %s" % (repo['hook'], patch, revlist[count], name)) count=count-1 # Step 4: write patch list and revision list to file, for user to edit later patchlist_file = os.path.join(os.getcwd(), patch_dir, "patchlist-%s" % name) repo['patchlist'] = patchlist_file f = open(patchlist_file, 'w') count=len(revlist)-1 for patch in patchlist: f.write("%s %s\n" % (patch, revlist[count])) check_patch(os.path.join(patch_dir, patch)) count=count-1 f.close() # Step 5: invoke bash for user to edit patch and patch list if conf.interactive: print('You may now edit the patch and patch list in %s\n' \ 'For 
example, you can remove unwanted patch entries from patchlist-*, so that they will be not applied later' % patch_dir); if not drop_to_shell(patch_dir): sys.exit(1) # Step 6: apply the generated and revised patch apply_patchlist(conf, repos) runcmd("rm -rf %s" % patch_dir) # Step 7: commit the updated config file if it's being tracked relpath = os.path.relpath(conf.conffile) try: output = runcmd("git status --porcelain %s" % relpath, printerr=False) except: # Outside the repository output = None if output: logger.info("Committing updated configuration file") if output.lstrip().startswith("M"): # create the "components" string component_str = "all components" if len(components) > 0: # otherwise tell which components were actually changed component_str = ", ".join(components) # expand the template with known values template = Template(conf.commit_msg_template) raw_msg = template.substitute(components = component_str) # sanitize the string before using it in command line msg = raw_msg.replace('"', '\\"') runcmd('git commit -m "%s" %s' % (msg, relpath)) def apply_patchlist(conf, repos): """ apply the generated patch list to combo repo """ for name in repos: repo = conf.repos[name] lastrev = repo["last_revision"] prevrev = lastrev # Get non-blank lines from patch list file patchlist = [] if os.path.exists(repo['patchlist']) or not conf.interactive: # Note: we want this to fail here if the file doesn't exist and we're not in # interactive mode since the file should exist in this case with open(repo['patchlist']) as f: for line in f: line = line.rstrip() if line: patchlist.append(line) ldir = conf.repos[name]['local_repo_dir'] branch = conf.repos[name].get('branch', "master") branchrev = runcmd("git rev-parse %s" % branch, ldir).strip() if patchlist: logger.info("Applying patches from %s..." 
% name) linecount = len(patchlist) i = 1 for line in patchlist: patchfile = line.split()[0] lastrev = line.split()[1] patchdisp = os.path.relpath(patchfile) if os.path.getsize(patchfile) == 0: logger.info("(skipping %d/%d %s - no changes)" % (i, linecount, patchdisp)) else: cmd = "git am --keep-cr %s-p1 %s" % ('-s ' if repo.get('signoff', True) else '', patchfile) logger.info("Applying %d/%d: %s" % (i, linecount, patchdisp)) try: runcmd(cmd) except subprocess.CalledProcessError: logger.info('Running "git am --abort" to cleanup repo') runcmd("git am --abort") logger.error('"%s" failed' % cmd) logger.info("Please manually apply patch %s" % patchdisp) logger.info("Note: if you exit and continue applying without manually applying the patch, it will be skipped") if not drop_to_shell(): if prevrev != repo['last_revision']: conf.update(name, "last_revision", prevrev) sys.exit(1) prevrev = lastrev i += 1 # Once all patches are applied, we should update # last_revision to the branch head instead of the last # applied patch. The two are not necessarily the same when # the last commit is a merge commit or when the patches at # the branch head were intentionally excluded. # # If we do not do that for a merge commit, the next # combo-layer run will only exclude patches reachable from # one of the merged branches and try to re-apply patches # from other branches even though they were already # copied. # # If patches were intentionally excluded, the next run will # present them again instead of skipping over them. This # may or may not be intended, so the code here is conservative # and only addresses the "head is merge commit" case. 
if lastrev != branchrev and \ len(runcmd("git show --pretty=format:%%P --no-patch %s" % branch, ldir).split()) > 1: lastrev = branchrev else: logger.info("No patches to apply from %s" % name) lastrev = branchrev if lastrev != repo['last_revision']: conf.update(name, "last_revision", lastrev) def action_splitpatch(conf, args): """ generate the commit patch and split the patch per repo """ logger.debug("action_splitpatch") if len(args) > 1: commit = args[1] else: commit = "HEAD" patchdir = "splitpatch-%s" % commit if not os.path.exists(patchdir): os.mkdir(patchdir) # filerange_root is for the repo whose dest_dir is root "." # and it should be specified by excluding all other repo dest dir # like "-x repo1 -x repo2 -x repo3 ..." filerange_root = "" for name in conf.repos: dest_dir = conf.repos[name]['dest_dir'] if dest_dir != ".": filerange_root = '%s -x "%s/*"' % (filerange_root, dest_dir) for name in conf.repos: dest_dir = conf.repos[name]['dest_dir'] patch_filename = "%s/%s.patch" % (patchdir, name) if dest_dir == ".": cmd = "git format-patch -n1 --stdout %s^..%s | filterdiff -p1 %s > %s" % (commit, commit, filerange_root, patch_filename) else: cmd = "git format-patch --no-prefix -n1 --stdout %s^..%s -- %s > %s" % (commit, commit, dest_dir, patch_filename) runcmd(cmd) # Detect empty patches (including those produced by filterdiff above # that contain only preamble text) if os.path.getsize(patch_filename) == 0 or runcmd("filterdiff %s" % patch_filename) == "": os.remove(patch_filename) logger.info("(skipping %s - no changes)", name) else: logger.info(patch_filename) def action_error(conf, args): logger.info("invalid action %s" % args[0]) actions = { "init": action_init, "update": action_update, "pull": action_pull, "splitpatch": action_splitpatch, } def main(): parser = optparse.OptionParser( version = "Combo Layer Repo Tool version %s" % __version__, usage = """%prog [options] action Create and update a combination layer repository from multiple component 
repositories. Action: init initialise the combo layer repo update [components] get patches from component repos and apply them to the combo repo pull [components] just pull component repos only splitpatch [commit] generate commit patch and split per component, default commit is HEAD""") parser.add_option("-c", "--conf", help = "specify the config file (conf/combo-layer.conf is the default).", action = "store", dest = "conffile", default = "conf/combo-layer.conf") parser.add_option("-i", "--interactive", help = "interactive mode, user can edit the patch list and patches", action = "store_true", dest = "interactive", default = False) parser.add_option("-D", "--debug", help = "output debug information", action = "store_true", dest = "debug", default = False) parser.add_option("-n", "--no-pull", help = "skip pulling component repos during update", action = "store_true", dest = "nopull", default = False) parser.add_option("--hard-reset", help = "instead of pull do fetch and hard-reset in component repos", action = "store_true", dest = "hard_reset", default = False) parser.add_option("-H", "--history", help = "import full history of components during init", action = "store_true", default = False) options, args = parser.parse_args(sys.argv) # Dispatch to action handler if len(args) == 1: logger.error("No action specified, exiting") parser.print_help() elif args[1] not in actions: logger.error("Unsupported action %s, exiting\n" % (args[1])) parser.print_help() elif not os.path.exists(options.conffile): logger.error("No valid config file, exiting\n") parser.print_help() else: if options.debug: logger.setLevel(logging.DEBUG) confdata = Configuration(options) initmode = (args[1] == 'init') confdata.sanity_check(initmode) actions.get(args[1], action_error)(confdata, args[1:]) if __name__ == "__main__": try: ret = main() except Exception: ret = 1 import traceback traceback.print_exc() sys.exit(ret)