diff --git a/.gitignore b/.gitignore
old mode 100644
new mode 100755
index e693f8d..ea550af
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,272 @@
-*.pyc
 *.cfg
 scratch.py
+config.py
+
+.DS_STORE
+run.vbs
+
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+
+# Visual Studio 2015 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUNIT
+*.VisualState.xml
+TestResult.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# DNX
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+*_i.c
+*_p.c
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.pch
+*.pdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# TODO: Comment the next line if you want to checkin your web deploy settings
+# but database connection strings (with potential passwords) will be unencrypted
+#*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# The packages folder can be ignored because of Package Restore
+**/packages/*
+# except build/, which is used as an MSBuild target.
+!**/packages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/packages/repositories.config
+# NuGet v3's project.json files produces more ignoreable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+node_modules/
+orleans.codegen.cs
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+
+# SQL Server files
+*.mdf
+*.ldf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+
+#vim
+*.swp
diff --git a/MeetupDF.py b/MeetupDF.py
new file mode 100644
index 0000000..a839e15
--- /dev/null
+++ b/MeetupDF.py
@@ -0,0 +1,67 @@
+import pandas as pd
+import re
+import os
+from datetime import datetime
+
+pd.options.mode.chained_assignment = 'raise'
+
+class MeetupDF(pd.DataFrame):
+
+    @property
+    def _constructor(self):
+        return MeetupDF
+
+    @staticmethod
+    def clean_description(desc):
+        # remove html tags
+        desc = re.sub('<[^>]+>', '', desc)
+        desc_spl = desc.split('.')
+
+        for i, sentence in enumerate(desc_spl):
+            sentence = sentence + '.'
+            if i == 0:
+                new_desc = sentence
+            elif len(new_desc) < 285:
+                new_desc = new_desc + sentence
+            else:
+                break
+
+        new_desc = new_desc.replace('\n', '')
+        new_desc = new_desc.replace(' ', '')
+        return new_desc
+
+    @staticmethod
+    def parse_extra_col_names(extra_fields):
+        s = extra_fields.replace(' ','')
+        return s.split(',')
+
+    def edit_df(self, extra_fields=None):
+        col_to_keep = ['name','members', 'city', 'description', 'next_event', 'join_mode','link','score']
+        if extra_fields is not None:
+            extra_cols = self.parse_extra_col_names(extra_fields)
+            col_to_keep = col_to_keep + extra_cols
+
+        self = self[col_to_keep]
+        self = self.rename(columns={'score':'relevancy_score'})
+        self['description'] = self['description'].apply(self.clean_description)
+
+        if 'last_event' in col_to_keep:
+            self['prev_event_name'] = self['last_event'].apply(lambda x: x['name'] if pd.notnull(x) else -1)
+            self['prev_event_rsvp_count'] = self['last_event'].apply(lambda x: x['yes_rsvp_count'] if pd.notnull(x) else -1)
+            self.drop(['last_event'], axis=1, inplace=True)
+
+        self['next_event_name'] = self['next_event'].apply(lambda x: x['name'] if pd.notnull(x) else -1)
+        self['next_event_time'] = self['next_event'].apply(lambda x: datetime.fromtimestamp(x['time']/1000) if pd.notnull(x) else -1)
+        self['next_event_rsvp_count'] = self['next_event'].apply(lambda x: x['yes_rsvp_count'] if pd.notnull(x) else -1)
+        self.drop(['next_event'], axis=1, inplace=True)
+
+        self = self.sort_values(['members'], ascending=False)
+        return self
+
+    def save_wb(self, path = os.path.expanduser('~/Documents/'), title = 'Meetup Groups'):
+        print('\nsaving Excel file to {}'.format(path))
+        writer = pd.ExcelWriter(path + datetime.now().strftime("%Y-%m-%d ") + title + '.xlsx', engine='xlsxwriter', date_format="m/d/yyyy", datetime_format="m/d/yyyy")
+        self.to_excel(writer, sheet_name=title, merge_cells=False, index=False)
+        writer.save()
+
+
diff --git a/MultipartPostHandler.py b/MultipartPostHandler.py
old mode 100644
new mode 100755
index b9e3fcf..6c88df6
--- a/MultipartPostHandler.py
+++ b/MultipartPostHandler.py
@@ -38,10 +38,13 @@
 then uploads it to the W3C validator.
 """
 
-import urllib
-import urllib2
-import mimetools, mimetypes
+import urllib.request, urllib.parse, urllib.error
+import urllib.request, urllib.error, urllib.parse
+#import mimetools
+import mimetypes
 import os, stat
+import string
+import random
 
 class Callable:
     def __init__(self, anycallable):
@@ -51,8 +54,8 @@ def __init__(self, anycallable):
 # assigning a sequence.
 doseq = 1
 
-class MultipartPostHandler(urllib2.BaseHandler):
-    handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
+class MultipartPostHandler(urllib.request.BaseHandler):
+    handler_order = urllib.request.HTTPHandler.handler_order - 10 # needs to run first
 
     def http_request(self, request):
         data = request.get_data()
@@ -60,31 +63,34 @@ def http_request(self, request):
             v_files = []
             v_vars = []
             try:
-                for(key, value) in data.items():
+                for(key, value) in list(data.items()):
                     if type(value) == file:
                         v_files.append((key, value))
                     else:
                         v_vars.append((key, value))
             except TypeError:
                 systype, value, traceback = sys.exc_info()
-                raise TypeError, "not a valid non-string sequence or mapping object", traceback
+                raise TypeError("not a valid non-string sequence or mapping object").with_traceback(traceback)
 
             if len(v_files) == 0:
-                data = urllib.urlencode(v_vars, doseq)
+                data = urllib.parse.urlencode(v_vars, doseq)
             else:
                 boundary, data = self.multipart_encode(v_vars, v_files)
 
                 contenttype = 'multipart/form-data; boundary=%s' % boundary
                 if(request.has_header('Content-Type')
                    and request.get_header('Content-Type').find('multipart/form-data') != 0):
-                    print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
+                    print("Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data'))
                 request.add_unredirected_header('Content-Type', contenttype)
 
             request.add_data(data)
         return request
 
+    def id_generator(size=10, chars=string.ascii_uppercase + string.digits):
+        return ''.join(random.choice(chars) for _ in range(size))
+
     def multipart_encode(vars, files, boundary = None, buffer = None):
         if boundary is None:
-            boundary = mimetools.choose_boundary()
+            boundary = MultipartPostHandler.id_generator()
         if buffer is None:
             buffer = ''
         for(key, value) in vars:
@@ -103,15 +109,15 @@ def multipart_encode(vars, files, boundary = None, buffer = None):
             buffer += '\r\n' + fd.read() + '\r\n'
         buffer += '--%s--\r\n\r\n' % boundary
         return boundary, buffer
-    multipart_encode = Callable(multipart_encode)
+    multipart_encode = Callable(multipart_encode)
 
     https_request = http_request
 
 def main():
     import tempfile, sys
     validatorURL = "http://validator.w3.org/check"
-    opener = urllib2.build_opener(MultipartPostHandler)
+    opener = urllib.request.build_opener(MultipartPostHandler)
 
     def validateFile(url):
         temp = tempfile.mkstemp(suffix=".html")
@@ -119,7 +125,7 @@ def validateFile(url):
         params = { "ss" : "0",            # show source
                    "doctype" : "Inline",
                    "uploaded_file" : open(temp[1], "rb") }
-        print opener.open(validatorURL, params).read()
+        print(opener.open(validatorURL, params).read())
         os.remove(temp[1])
 
     if len(sys.argv[1:]) > 0:
diff --git a/README.md b/README.md
old mode 100644
new mode 100755
index c6c6caf..db0d9a5
--- a/README.md
+++ b/README.md
@@ -5,5 +5,7 @@ _note_ this project is _not_ actively maintained but contributions _are_ welcome
 
 See [Meetup API Clients][1] for details.
 
+To get a list of groups based on keyword, zip code, and radius, either printed to the console or exported to an Excel file, run `setup.py` first; once the config file has been created, run `findGroups.py`.
+
 [1]:http://www.meetup.com/meetup_api/clients/
 
diff --git a/__init__.py b/__init__.py
old mode 100644
new mode 100755
index 1ccf69c..29e5a11
--- a/__init__.py
+++ b/__init__.py
@@ -1 +1 @@
-from meetup_api_client import *
+from .meetup_api_client import *
diff --git a/app.py b/app.py
index 2449a20..09d6284 100755
--- a/app.py
+++ b/app.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-from __future__ import with_statement
+
 
 """
 Simple, partial test of client. Obtains an access token for the given consumer
@@ -7,10 +7,10 @@
 http://www.meetup.com/account/oauth_apps/
 """
 
-import ConfigParser
+import configparser
 
-import meetup_api_client as mac
-from meetup_api_client import *
+from . import meetup_api_client as mac
+from .meetup_api_client import *
 from optparse import OptionParser
 import webbrowser
 
@@ -22,7 +22,7 @@ def config_client(config_name=None):
 
 def get_config(name=None):
     name = name or 'app.cfg'
-    config = ConfigParser.ConfigParser()
+    config = configparser.ConfigParser()
     config.optionxform = str
     config.read(name)
 
@@ -69,12 +69,12 @@ def set_token(config, name, key, secret):
 mucli = get_client(config)
 
 def access_granted():
-    print """\
+    print("""\
 access-key: %s
 accses-secret: %s
 
 Congratulations, you've got an access token! Try it out in an interpreter.
-    """ % get_token(config, 'access')
+    """ % get_token(config, 'access'))
 
 if config.has_section('access'):
     access_granted()
@@ -84,7 +84,7 @@ def access_granted():
             sys.exit("To complete the process you must supply a --verifier")
         request_key, request_secret = get_token(config, 'request')
         oauth_session = mucli.new_session(request_key=request_key, request_secret=request_secret)
-        print " member_id: %s" % oauth_session.fetch_access_token(options.verifier)
+        print(" member_id: %s" % oauth_session.fetch_access_token(options.verifier))
         set_token(config, 'access', oauth_session.access_token.key, oauth_session.access_token.secret)
         access_granted()
     else:
@@ -97,7 +97,7 @@ def access_granted():
             url = oauth_session.get_authenticate_url()
         else:
             url = oauth_session.get_authorize_url()
-        print "Opening a browser on the authorization page: %s" % url
+        print("Opening a browser on the authorization page: %s" % url)
         webbrowser.open(url)
 
diff --git a/findGroups.py b/findGroups.py
old mode 100644
new mode 100755
index f900f62..f969787
--- a/findGroups.py
+++ b/findGroups.py
@@ -1,28 +1,65 @@
 #!/usr/bin/env python
-from __future__ import with_statement
-# eg: python findGroups.py --apikey=$myKey --radius=10 --zip=02143 --text="python"
-# get your key at https://secure.meetup.com/meetup_api/key/
+"""Output a list of Meetup groups based on the user's search criteria, either exported to an Excel file or printed to the console.
+Search parameters can be passed in via the command line; otherwise default parameters are used.
+Command line example: python findGroups.py --radius=10 --zip=02143 --text="python"
+
+An API key can be obtained at https://secure.meetup.com/meetup_api/key/
+"""
+
+import argparse
+import base64
+import sys
 import meetup_api_client as mac
-from meetup_api_client import *
-from optparse import OptionParser
+
+TO_EXCEL = True
+DEFAULT_ZIP_CODE = '92024'
+DEFAULT_RADIUS = '25'
+DEFAULT_SEARCH_PARAM = 'Python'
 
 if __name__ == '__main__':
-    option = OptionParser('%prog --apikey --zip --radius --text')
-    option.add_option('--apikey', dest='apikey',
-                      help='API key')
-    option.add_option('--zip', dest='zip',
-                      help='Zip code used for radial search')
-    option.add_option('--radius', dest='radius',
-                      help='Radius to search in miles')
-    option.add_option('--text', dest='text',
-                      help='Text to search for in group name / description')
-    (options, args) = option.parse_args()
-
-    client = mac.Meetup(options.apikey)
-    groups = client.find_groups(zip=options.zip, radius=options.radius, text=options.text, order="members")
-
-    for group in groups:
-        print str(group['id']) + ': ' + group['name'] + ' (' + str(group['members']) + ' members)'
+    # get the user's API key from the config file
+    try:
+        import config
+    except ImportError:
+        sys.exit("\nFailed to import config file; please run setup.py")
+    KEY = base64.b64decode(config.credentials['api_key']).decode()
+    client = mac.Meetup(KEY)
+
+    # if the user entered command line arguments
+    if len(sys.argv) > 1:
+        parser = argparse.ArgumentParser()
+        parser.add_argument('--zip', dest='zip', help='Zip code used for radial search')
+        parser.add_argument('--radius', dest='radius', help='Radius to search in miles')
+        parser.add_argument('--text', dest='text', help='Text to search for in group name / description')
+
+        args = parser.parse_args()
+        print('\nArguments supplied: ' + str(args))
+        search_param = args.text if args.text else DEFAULT_SEARCH_PARAM
+        groups = client.find_groups(zip=args.zip, radius=args.radius, text=args.text, order="members")
+
+    # no command line arguments: use the default arguments
+    else:
+        extra_fields = 'last_event, past_event_count'
+        sort_on = 'members'
+        search_param = DEFAULT_SEARCH_PARAM
+
+        kwargs = {'zip':DEFAULT_ZIP_CODE, 'radius':DEFAULT_RADIUS, 'text':DEFAULT_SEARCH_PARAM, 'fields':extra_fields, 'order':sort_on}
+        print('\nNo command line arguments supplied, default arguments are {}'.format(kwargs))
+        groups = client.find_groups(**kwargs)
+
+
+    if TO_EXCEL:
+        from MeetupDF import MeetupDF
+        df = MeetupDF(groups)
+        try:
+            df = df.edit_df(extra_fields)
+        except (KeyError, NameError):
+            df = df.edit_df()
+        df.save_wb(title=search_param + ' Meetup Groups')
+    else:
+        print('\n')
+        for group in groups:
+            print(str(group['id']) + ': ' + group['name'] + ' (' + str(group['members']) + ' members)')
diff --git a/meetup_api_client.py b/meetup_api_client.py
index ba41bfc..0ebb960 100755
--- a/meetup_api_client.py
+++ b/meetup_api_client.py
@@ -1,13 +1,13 @@
-#!/usr/bin/env python
-from __future__ import with_statement
+
 
 import datetime
 import time
 import cgi
 import types
 import logging
-from urllib import urlencode
-from urllib2 import HTTPError, HTTPErrorProcessor, urlopen, Request, build_opener
+from urllib.parse import urlencode
+from urllib.error import HTTPError
+from urllib.request import urlopen, Request, build_opener, HTTPErrorProcessor
 
 import oauth
 import MultipartPostHandler as mph
@@ -30,7 +30,7 @@
         import simplejson
         parse_json = lambda s: simplejson.loads(s.decode(API_JSON_ENCODING))
     except:
-        print "Error - your system is missing support for a JSON parsing library."
+ print("Error - your system is missing support for a JSON parsing library.") GROUPS_URI = '2/groups' EVENTS_URI = '2/events' @@ -66,7 +66,7 @@ class MeetupHTTPErrorProcessor(HTTPErrorProcessor): def http_response(self, request, response): try: return HTTPErrorProcessor.http_response(self, request, response) - except HTTPError, e: + except HTTPError as e: data = e.read() try: @@ -148,8 +148,8 @@ def _generate_read_method(name): def read_method(self, **args): return API_Response(self._fetch(name, **args), name) return read_method -for method, uri in READ_METHODS.items(): - read_method = types.MethodType(_generate_read_method(uri), None, Meetup) +for method, uri in list(READ_METHODS.items()): + read_method = types.MethodType(_generate_read_method(uri), Meetup) setattr(Meetup, 'get_' + method, read_method) class NoToken(Exception): @@ -283,7 +283,7 @@ def __init__(self, properties): """load properties that are relevant to all items (id, etc.)""" for field in self.datafields: # Not all fields are required to be returned - if properties.has_key(field): + if field in properties: self.__setattr__(field, properties[field]) self.json = properties diff --git a/oauth.py b/oauth.py old mode 100644 new mode 100755 index 550eb5d..c7e2b4c --- a/oauth.py +++ b/oauth.py @@ -23,10 +23,10 @@ """ import cgi -import urllib +import urllib.request, urllib.parse, urllib.error import time import random -import urlparse +import urllib.parse import hmac import binascii @@ -47,11 +47,11 @@ def build_authenticate_header(realm=''): def escape(s): """Escape a URL including any /.""" - return urllib.quote(s, safe='~') + return urllib.parse.quote(s, safe='~') def _utf8_str(s): """Convert unicode to utf-8.""" - if isinstance(s, unicode): + if isinstance(s, str): return s.encode("utf-8") else: return str(s) @@ -115,13 +115,13 @@ def set_verifier(self, verifier=None): def get_callback_url(self): if self.callback and self.verifier: # Append the oauth_verifier. - parts = urlparse.urlparse(self.callback) + parts = urllib.parse.urlparse(self.callback) scheme, netloc, path, params, query, fragment = parts[:6] if query: query = '%s&oauth_verifier=%s' % (query, self.verifier) else: query = 'oauth_verifier=%s' % self.verifier - return urlparse.urlunparse((scheme, netloc, path, params, + return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment)) return self.callback @@ -132,7 +132,7 @@ def to_string(self): } if self.callback_confirmed is not None: data['oauth_callback_confirmed'] = self.callback_confirmed - return urllib.urlencode(data) + return urllib.parse.urlencode(data) def from_string(s): """ Returns a token from something like: @@ -193,7 +193,7 @@ def _get_timestamp_nonce(self): def get_nonoauth_parameters(self): """Get any non-OAuth parameters.""" parameters = {} - for k, v in self.parameters.iteritems(): + for k, v in self.parameters.items(): # Ignore oauth parameters. if k.find('oauth_') < 0: parameters[k] = v @@ -204,7 +204,7 @@ def to_header(self, realm=''): auth_header = 'OAuth realm="%s"' % realm # Add the oauth parameters. 
         if self.parameters:
-            for k, v in self.parameters.iteritems():
+            for k, v in self.parameters.items():
                 if k[:6] == 'oauth_':
                     auth_header += ', %s="%s"' % (k, escape(str(v)))
         return {'Authorization': auth_header}
@@ -212,7 +212,7 @@ def to_postdata(self):
         """Serialize as post data for a POST request."""
         return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) \
-            for k, v in self.parameters.iteritems()])
+            for k, v in self.parameters.items()])
 
     def to_url(self):
         """Serialize as a URL for a GET request."""
@@ -228,7 +228,7 @@ def get_normalized_parameters(self):
             pass
         # Escape key values before sorting.
         key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) \
-            for k,v in params.items()]
+            for k,v in list(params.items())]
         # Sort lexicographically, first after key, then after value.
         key_values.sort()
         # Combine key value pairs into a string.
@@ -240,7 +240,7 @@ def get_normalized_http_method(self):
 
     def get_normalized_http_url(self):
         """Parses the URL and rebuilds it to be scheme://host/path."""
-        parts = urlparse.urlparse(self.http_url)
+        parts = urllib.parse.urlparse(self.http_url)
         scheme, netloc, path = parts[:3]
         # Exclude default port numbers.
         if scheme == 'http' and netloc[-3:] == ':80':
@@ -288,7 +288,7 @@ def from_request(http_method, http_url, headers=None, parameters=None,
             parameters.update(query_params)
 
         # URL parameters.
-        param_str = urlparse.urlparse(http_url)[4] # query
+        param_str = urllib.parse.urlparse(http_url)[4] # query
         url_params = OAuthRequest._split_url_string(param_str)
         parameters.update(url_params)
 
@@ -354,15 +354,15 @@ def _split_header(header):
             # Split key-value.
             param_parts = param.split('=', 1)
             # Remove quotes and unescape the value.
-            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
+            params[param_parts[0]] = urllib.parse.unquote(param_parts[1].strip('\"'))
         return params
     _split_header = staticmethod(_split_header)
 
     def _split_url_string(param_str):
         """Turn URL string into parameters."""
         parameters = cgi.parse_qs(param_str, keep_blank_values=False)
-        for k, v in parameters.iteritems():
-            parameters[k] = urllib.unquote(v[0])
+        for k, v in parameters.items():
+            parameters[k] = urllib.parse.unquote(v[0])
         return parameters
     _split_url_string = staticmethod(_split_url_string)
 
@@ -467,7 +467,7 @@ def _get_signature_method(self, oauth_request):
             # Get the signature method object.
             signature_method = self.signature_methods[signature_method]
         except:
-            signature_method_names = ', '.join(self.signature_methods.keys())
+            signature_method_names = ', '.join(list(self.signature_methods.keys()))
             raise OAuthError('Signature method %s not supported try one of the '
                 'following: %s' % (signature_method, signature_method_names))
 
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..8e6fab0
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,38 @@
+import os
+import getpass
+import base64
+import time
+
+
+CONFIG_FILE_PATH = "config.py"
+
+CONFIG_FILE_TEMPLATE = """credentials = dict(api_key='{0}') """
+
+
+# base64-encode the API key (obfuscation only, not real hashing or encryption)
+try:
+    api_key = base64.b64encode(raw_input("enter/paste API Key: ").encode()).decode()
+except NameError:  # Python 3 has no raw_input; fall back to input()
+    api_key = base64.b64encode(input("enter/paste API Key: ").encode()).decode()
+print(" Encoded: {}\n".format(api_key))
+
+new_config = CONFIG_FILE_TEMPLATE.format(api_key)
+
+# check if config file exists
+if not os.path.isfile(CONFIG_FILE_PATH):
+    # create new config file
+    with open(CONFIG_FILE_PATH, "w") as config_file:
+        config_file.write(new_config)
+    print("{} created successfully".format(CONFIG_FILE_PATH))
+else:
+    with open(CONFIG_FILE_PATH, "r") as config_file:
+        cur_config = config_file.read()
+    if new_config != cur_config:
+        # update config file
+        with open(CONFIG_FILE_PATH, "w") as config_file:
+            config_file.writelines(new_config)
+        print("{} updated successfully".format(CONFIG_FILE_PATH))
+    else:
+        print("{} already contains latest credentials".format(CONFIG_FILE_PATH))
+time.sleep(2)
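Below is a minimal usage sketch of the workflow this patch wires together. It is not part of the patch itself: it assumes `setup.py` has already written `config.py` with a base64-encoded API key, and it simply mirrors the calls `findGroups.py` makes (the zip, radius, and text values are the same illustrative defaults used there).

    import base64

    import config                    # written by setup.py; holds the base64-encoded API key
    import meetup_api_client as mac
    from MeetupDF import MeetupDF

    # decode the stored key and build a client, as findGroups.py does
    key = base64.b64decode(config.credentials['api_key']).decode()
    client = mac.Meetup(key)

    # same search findGroups.py runs in its default-arguments branch
    extra_fields = 'last_event, past_event_count'
    groups = client.find_groups(zip='92024', radius='25', text='Python',
                                fields=extra_fields, order='members')

    # wrap the results in the DataFrame subclass and export them to Excel
    df = MeetupDF(groups)
    df = df.edit_df(extra_fields)             # keep, clean, and rename the relevant columns
    df.save_wb(title='Python Meetup Groups')  # writes "<date> Python Meetup Groups.xlsx" to ~/Documents/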