77import base64
88import calendar
99import codecs
10- import datetime
1110import errno
1211import getpass
1312import json
3736
3837FNULL = open (os .devnull , 'w' )
3938
40-
41- def _get_log_date ():
42- return datetime .datetime .isoformat (datetime .datetime .now ())
43-
44-
45- def log_info (message ):
46- """
47- Log message (str) or messages (List[str]) to stdout
48- """
49- if type (message ) == str :
50- message = [message ]
51-
52- for msg in message :
53- logging .info (msg )
54-
55-
56- def log_warning (message ):
57- """
58- Log message (str) or messages (List[str]) to stderr
59- """
60- if type (message ) == str :
61- message = [message ]
62-
63- for msg in message :
64- logging .warning (msg )
39+ logger = logging .getLogger (__name__ )
6540
6641
6742def logging_subprocess (popenargs ,
@@ -77,7 +52,7 @@ def logging_subprocess(popenargs,
7752 child = subprocess .Popen (popenargs , stdout = subprocess .PIPE ,
7853 stderr = subprocess .PIPE , ** kwargs )
7954 if sys .platform == 'win32' :
80- log_info ("Windows operating system detected - no subprocess logging will be returned" )
55+ logger . info ("Windows operating system detected - no subprocess logging will be returned" )
8156
8257 log_level = {child .stdout : stdout_log_level ,
8358 child .stderr : stderr_log_level }
@@ -139,6 +114,11 @@ def parse_args(args=None):
139114 metavar = 'USER' ,
140115 type = str ,
141116 help = 'github username' )
117+ parser .add_argument ('-q' ,
118+ '--quiet' ,
119+ action = 'store_true' ,
120+ dest = 'quiet' ,
121+                         help = 'suppress non-error log messages' )
142122 parser .add_argument ('-u' ,
143123 '--username' ,
144124 dest = 'username' ,
@@ -441,29 +421,29 @@ def retrieve_data_gen(args, template, query_args=None, single_request=False):
441421 try :
442422 response = json .loads (r .read ().decode ('utf-8' ))
443423 except IncompleteRead :
444- log_warning ("Incomplete read error detected" )
424+ logger . warning ("Incomplete read error detected" )
445425 read_error = True
446426 except json .decoder .JSONDecodeError :
447- log_warning ("JSON decode error detected" )
427+ logger . warning ("JSON decode error detected" )
448428 read_error = True
449429 except TimeoutError :
450- log_warning ("Tiemout error detected" )
430+             logger . warning ("Timeout error detected" )
451431 read_error = True
452432 else :
453433 read_error = False
454434
455435 # be gentle with API request limit and throttle requests if remaining requests getting low
456436 limit_remaining = int (r .headers .get ('x-ratelimit-remaining' , 0 ))
457437 if args .throttle_limit and limit_remaining <= args .throttle_limit :
458- log_info (
438+ logger . info (
459439 'API request limit hit: {} requests left, pausing further requests for {}s' .format (
460440 limit_remaining ,
461441 args .throttle_pause ))
462442 time .sleep (args .throttle_pause )
463443
464444 retries = 0
465445 while retries < 3 and (status_code == 502 or read_error ):
466- log_warning ('API request failed. Retrying in 5 seconds' )
446+ logger . warning ('API request failed. Retrying in 5 seconds' )
467447 retries += 1
468448 time .sleep (5 )
469449 request = _construct_request (per_page , page , query_args , template , auth , as_app = args .as_app ) # noqa
@@ -474,13 +454,13 @@ def retrieve_data_gen(args, template, query_args=None, single_request=False):
474454 response = json .loads (r .read ().decode ('utf-8' ))
475455 read_error = False
476456 except IncompleteRead :
477- log_warning ("Incomplete read error detected" )
457+ logger . warning ("Incomplete read error detected" )
478458 read_error = True
479459 except json .decoder .JSONDecodeError :
480- log_warning ("JSON decode error detected" )
460+ logger . warning ("JSON decode error detected" )
481461 read_error = True
482462 except TimeoutError :
483- log_warning ("Tiemout error detected" )
463+                 logger . warning ("Timeout error detected" )
484464 read_error = True
485465
486466 if status_code != 200 :
@@ -532,12 +512,12 @@ def _get_response(request, auth, template):
532512 errors , should_continue = _request_http_error (exc , auth , errors ) # noqa
533513 r = exc
534514 except URLError as e :
535- log_warning (e .reason )
515+ logger . warning (e .reason )
536516 should_continue = _request_url_error (template , retry_timeout )
537517 if not should_continue :
538518 raise
539519 except socket .error as e :
540- log_warning (e .strerror )
520+ logger . warning (e .strerror )
541521 should_continue = _request_url_error (template , retry_timeout )
542522 if not should_continue :
543523 raise
@@ -563,7 +543,7 @@ def _construct_request(per_page, page, query_args, template, auth, as_app=None):
563543 auth = auth .encode ('ascii' )
564544 request .add_header ('Authorization' , 'token ' .encode ('ascii' ) + auth )
565545 request .add_header ('Accept' , 'application/vnd.github.machine-man-preview+json' )
566- log_info ('Requesting {}?{}' .format (template , querystring ))
546+ logger . info ('Requesting {}?{}' .format (template , querystring ))
567547 return request
568548
569549
@@ -587,10 +567,10 @@ def _request_http_error(exc, auth, errors):
587567 delta = max (10 , reset - gm_now )
588568
589569 limit = headers .get ('x-ratelimit-limit' )
590- log_warning ('Exceeded rate limit of {} requests; waiting {} seconds to reset' .format (limit , delta )) # noqa
570+ logger . warning ('Exceeded rate limit of {} requests; waiting {} seconds to reset' .format (limit , delta )) # noqa
591571
592572 if auth is None :
593- log_info ('Hint: Authenticate to raise your GitHub rate limit' )
573+ logger . info ('Hint: Authenticate to raise your GitHub rate limit' )
594574
595575 time .sleep (delta )
596576 should_continue = True
@@ -600,7 +580,7 @@ def _request_http_error(exc, auth, errors):
600580def _request_url_error (template , retry_timeout ):
601581     # In case of a connection timing out, we can retry a few times
602582 # But we won't crash and not back-up the rest now
603- log_info ('{} timed out' .format (template ))
583+ logger . info ('{} timed out' .format (template ))
604584 retry_timeout -= 1
605585
606586 if retry_timeout >= 0 :
@@ -645,14 +625,14 @@ def download_file(url, path, auth):
645625 f .write (chunk )
646626 except HTTPError as exc :
647627 # Gracefully handle 404 responses (and others) when downloading from S3
648- log_warning ('Skipping download of asset {0} due to HTTPError: {1}' .format (url , exc .reason ))
628+ logger . warning ('Skipping download of asset {0} due to HTTPError: {1}' .format (url , exc .reason ))
649629 except URLError as e :
650630 # Gracefully handle other URL errors
651- log_warning ('Skipping download of asset {0} due to URLError: {1}' .format (url , e .reason ))
631+ logger . warning ('Skipping download of asset {0} due to URLError: {1}' .format (url , e .reason ))
652632 except socket .error as e :
653633 # Gracefully handle socket errors
654634 # TODO: Implement retry logic
655- log_warning ('Skipping download of asset {0} due to socker error: {1}' .format (url , e .strerror ))
635+ logger . warning ('Skipping download of asset {0} due to socker error: {1}' .format (url , e .strerror ))
656636
657637
658638def get_authenticated_user (args ):
@@ -668,15 +648,15 @@ def check_git_lfs_install():
668648
669649
670650def retrieve_repositories (args , authenticated_user ):
671- log_info ('Retrieving repositories' )
651+ logger . info ('Retrieving repositories' )
672652 single_request = False
673653 if args .user == authenticated_user ['login' ]:
674654 # we must use the /user/repos API to be able to access private repos
675655 template = 'https://{0}/user/repos' .format (
676656 get_github_api_host (args ))
677657 else :
678658 if args .private and not args .organization :
679- log_warning ('Authenticated user is different from user being backed up, thus private repositories cannot be accessed' )
659+ logger . warning ('Authenticated user is different from user being backed up, thus private repositories cannot be accessed' )
680660 template = 'https://{0}/users/{1}/repos' .format (
681661 get_github_api_host (args ),
682662 args .user )
@@ -724,7 +704,7 @@ def retrieve_repositories(args, authenticated_user):
724704
725705
726706def filter_repositories (args , unfiltered_repositories ):
727- log_info ('Filtering repositories' )
707+ logger . info ('Filtering repositories' )
728708
729709 repositories = []
730710 for r in unfiltered_repositories :
@@ -755,7 +735,7 @@ def filter_repositories(args, unfiltered_repositories):
755735
756736
757737def backup_repositories (args , output_directory , repositories ):
758- log_info ('Backing up repositories' )
738+ logger . info ('Backing up repositories' )
759739 repos_template = 'https://{0}/repos' .format (get_github_api_host (args ))
760740
761741 if args .incremental :
@@ -837,7 +817,7 @@ def backup_issues(args, repo_cwd, repository, repos_template):
837817 if args .skip_existing and has_issues_dir :
838818 return
839819
840- log_info ('Retrieving {0} issues' .format (repository ['full_name' ]))
820+ logger . info ('Retrieving {0} issues' .format (repository ['full_name' ]))
841821 issue_cwd = os .path .join (repo_cwd , 'issues' )
842822 mkdir_p (repo_cwd , issue_cwd )
843823
@@ -873,7 +853,7 @@ def backup_issues(args, repo_cwd, repository, repos_template):
873853 issues_skipped_message = ' (skipped {0} pull requests)' .format (
874854 issues_skipped )
875855
876- log_info ('Saving {0} issues to disk{1}' .format (
856+ logger . info ('Saving {0} issues to disk{1}' .format (
877857 len (list (issues .keys ())), issues_skipped_message ))
878858 comments_template = _issue_template + '/{0}/comments'
879859 events_template = _issue_template + '/{0}/events'
@@ -895,7 +875,7 @@ def backup_pulls(args, repo_cwd, repository, repos_template):
895875 if args .skip_existing and has_pulls_dir :
896876 return
897877
898- log_info ('Retrieving {0} pull requests' .format (repository ['full_name' ])) # noqa
878+ logger . info ('Retrieving {0} pull requests' .format (repository ['full_name' ])) # noqa
899879 pulls_cwd = os .path .join (repo_cwd , 'pulls' )
900880 mkdir_p (repo_cwd , pulls_cwd )
901881
@@ -939,7 +919,7 @@ def backup_pulls(args, repo_cwd, repository, repos_template):
939919 single_request = True
940920 )[0 ]
941921
942- log_info ('Saving {0} pull requests to disk' .format (
922+ logger . info ('Saving {0} pull requests to disk' .format (
943923 len (list (pulls .keys ()))))
944924 comments_template = _pulls_template + '/{0}/comments'
945925 commits_template = _pulls_template + '/{0}/commits'
@@ -961,7 +941,7 @@ def backup_milestones(args, repo_cwd, repository, repos_template):
961941 if args .skip_existing and os .path .isdir (milestone_cwd ):
962942 return
963943
964- log_info ('Retrieving {0} milestones' .format (repository ['full_name' ]))
944+ logger . info ('Retrieving {0} milestones' .format (repository ['full_name' ]))
965945 mkdir_p (repo_cwd , milestone_cwd )
966946
967947 template = '{0}/{1}/milestones' .format (repos_template ,
@@ -977,7 +957,7 @@ def backup_milestones(args, repo_cwd, repository, repos_template):
977957 for milestone in _milestones :
978958 milestones [milestone ['number' ]] = milestone
979959
980- log_info ('Saving {0} milestones to disk' .format (
960+ logger . info ('Saving {0} milestones to disk' .format (
981961 len (list (milestones .keys ()))))
982962 for number , milestone in list (milestones .items ()):
983963 milestone_file = '{0}/{1}.json' .format (milestone_cwd , number )
@@ -1000,7 +980,7 @@ def backup_labels(args, repo_cwd, repository, repos_template):
1000980def backup_hooks (args , repo_cwd , repository , repos_template ):
1001981 auth = get_auth (args )
1002982 if not auth :
1003- log_info ("Skipping hooks since no authentication provided" )
983+ logger . info ("Skipping hooks since no authentication provided" )
1004984 return
1005985 hook_cwd = os .path .join (repo_cwd , 'hooks' )
1006986 output_file = '{0}/hooks.json' .format (hook_cwd )
@@ -1013,15 +993,15 @@ def backup_hooks(args, repo_cwd, repository, repos_template):
1013993 output_file ,
1014994 hook_cwd )
1015995 except SystemExit :
1016- log_info ("Unable to read hooks, skipping" )
996+ logger . info ("Unable to read hooks, skipping" )
1017997
1018998
1019999def backup_releases (args , repo_cwd , repository , repos_template , include_assets = False ):
10201000 repository_fullname = repository ['full_name' ]
10211001
10221002 # give release files somewhere to live & log intent
10231003 release_cwd = os .path .join (repo_cwd , 'releases' )
1024- log_info ('Retrieving {0} releases' .format (repository_fullname ))
1004+ logger . info ('Retrieving {0} releases' .format (repository_fullname ))
10251005 mkdir_p (repo_cwd , release_cwd )
10261006
10271007 query_args = {}
@@ -1030,7 +1010,7 @@ def backup_releases(args, repo_cwd, repository, repos_template, include_assets=F
10301010 releases = retrieve_data (args , release_template , query_args = query_args )
10311011
10321012 # for each release, store it
1033- log_info ('Saving {0} releases to disk' .format (len (releases )))
1013+ logger . info ('Saving {0} releases to disk' .format (len (releases )))
10341014 for release in releases :
10351015 release_name = release ['tag_name' ]
10361016 release_name_safe = release_name .replace ('/' , '__' )
@@ -1075,12 +1055,12 @@ def fetch_repository(name,
10751055 stderr = FNULL ,
10761056 shell = True )
10771057 if initialized == 128 :
1078- log_info ("Skipping {0} ({1}) since it's not initialized" .format (
1058+ logger . info ("Skipping {0} ({1}) since it's not initialized" .format (
10791059 name , masked_remote_url ))
10801060 return
10811061
10821062 if clone_exists :
1083- log_info ('Updating {0} in {1}' .format (name , local_dir ))
1063+ logger . info ('Updating {0} in {1}' .format (name , local_dir ))
10841064
10851065 remotes = subprocess .check_output (['git' , 'remote' , 'show' ],
10861066 cwd = local_dir )
@@ -1101,7 +1081,7 @@ def fetch_repository(name,
11011081 git_command = ['git' , 'fetch' , '--all' , '--force' , '--tags' , '--prune' ]
11021082 logging_subprocess (git_command , None , cwd = local_dir )
11031083 else :
1104- log_info ('Cloning {0} repository from {1} to {2}' .format (
1084+ logger . info ('Cloning {0} repository from {1} to {2}' .format (
11051085 name ,
11061086 masked_remote_url ,
11071087 local_dir ))
@@ -1161,11 +1141,11 @@ def backup_account(args, output_directory):
11611141def _backup_data (args , name , template , output_file , output_directory ):
11621142 skip_existing = args .skip_existing
11631143 if not skip_existing or not os .path .exists (output_file ):
1164- log_info ('Retrieving {0} {1}' .format (args .user , name ))
1144+ logger . info ('Retrieving {0} {1}' .format (args .user , name ))
11651145 mkdir_p (output_directory )
11661146 data = retrieve_data (args , template )
11671147
1168- log_info ('Writing {0} {1} to disk' .format (len (data ), name ))
1148+ logger . info ('Writing {0} {1} to disk' .format (len (data ), name ))
11691149 with codecs .open (output_file , 'w' , encoding = 'utf-8' ) as f :
11701150 json_dump (data , f )
11711151
0 commit comments