OK, but how do I get it to work with GeekTool? I mainly use the shell, never a script!
So the first link I gave you wasn't much good; it isn't a 5 day forecast.
Here is the actual 5 day script. Save it as fivedayforecastv3.py in a directory of your choosing (there's a quick test command right after the listing).
Code:
#!/usr/bin/env python
""" FIVEDAYFORCAST
ROBERT WOLTERMAN (xtacocorex) - 2011
GRABS THE 5 DAY WEATHER FORECAST FROM YAHOO WEATHER FOR PRINTING IN GEEKTOOL
AWESOMENESS FLOWS FROM HERE, THERE ARE NO FILES DOWNLOADED FROM THE INTERNET
THE ONLY FILE NEEDED IS A ZIPCODE TO WOEID MAPPING TO KEEP THE YAHOO QUERIES
TO A MINIMUM (1000 HITS/HOUR LIMIT)
*** MAKE SURE TO KEEP THE REFRESH RATE IN GEEKTOOL/NERDTOOL TO A SLOWER VALUE
THANKS TO brenm666 AND dotcommer ON THE MACRUMORS FORUMS FOR BETA TESTING THIS
"""
# CHANGELOG
# 07 JUNE 2011
# - ADDED OPTION TO SPECIFY A URL TO PARSE IF LOCATION GRABBING ISN'T VALID
# FOR A USERS AREA
# _ FIXED ISSUE WITH urllib2 NOT WORKING PROPERLY OUTSIDE OF THE USA, MADE
# THE CODE USE urllib INSTEAD
# - FIXED ISSUE WHERE ZIP CODES HAVE SPACES, THIS WOULD CAUSE NON-USA LOCATIONS
# TO FAIL IN THE YAHOO QUERY
# - RE-WROTE REGULAR EXPRESSIONS TO PARSE THE HTML
# 04 JUNE 2011
# - ADDED SUPPORT TO AUTOMAGICALLY GRAB THE ZIP CODE AND WOEID AND GENERATE THE
# THE PROPER URL FOR YAHOO WEATHER
# - RE-FORMATTED THE COMMAND LINE ARGUMENTS
# - ADDED SUPPORT TO TURN ON/OFF UNDERLINING THE DAY HEADER
# 30 MAY 2011
# - ADDED 5 DAY HIGH/LOW CODE
# 29 MAY 2011
# - INITIAL WRITE
# MODULE IMPORTS
import urllib, os, re, sys, string, ast, optparse
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# *** ONLY MODIFY THIS SECTION ***
# MODIFY THIS WITH YOUR WEATHER URL
UNITS = "C"
# FILE FOR WHERE THE WOEID TO ZIPCODE MAP GOES
# CURRENTLY SET TO BE A HIDDEN FOLDER IN THE USERS HOME DIRECTORY
# YOU CAN CUSTOMIZE THIS IF YOU WANT THE FILE SOMEPLACE ELSE
FCASTWOEIDDATA = os.getenv('HOME') + os.path.sep + ".zip_woeid_map.txt"
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# DEBUG FLAG
DEBUG = False
UNITTEST = False
URLTYPE = "5DAY"
YQLBASEURL = "http://query.yahooapis.com/v1/public/yql"
IPLOCURL = "http://ipinfodb.com/my_ip_location.php"
# CONSTANTS
COMMA = "%2C"
SPACE = "%20"
EQUAL = "%3D"

def myencode(instr):
    """
    myencode
    - FUNCTION TO ENCODE THE YAHOO QUERY TEXT PROPERLY FOR THE URL
    - INPUT: instr - QUERY STRING TO ENCODE
    - OUPUTS: rstr - ENCODED QUERY STRING
    """
    rstr = ""
    for i in xrange(len(instr)):
        if instr[i] == " ":
            rstr += SPACE
        elif instr[i] == "=":
            rstr += EQUAL
        else:
            rstr += instr[i]
    return rstr

def getLocationData():
    """
    getWOEID
    - FUNCTION TO GET THE WOEID ID FROM THE LOCALLY STORED DATABASE
      OR
      PULLS THE DATA FROM THE INTERNET AND STORES INTO THE DATABASE
    - INPUT: NONE
    - OUPUTS: locdict - DICTIONARY OF LOCATION DATA
    """
    # SET UP OUR DICTIONARY
    locdict = { 'zipcode' : "",
                'country' : "",
                'stprov' : "",
                'city' : "",
                'lat' : "",
                'lon' : ""}
    # GET THE ZIP CODE BASED UPON OUR EXTERNAL IP ADDRESS
    if not UNITTEST:
        url = urllib.urlopen(IPLOCURL)
        ippagedata = url.read()
        url.close()
    if UNITTEST and DEBUG:
        # DEBUG
        print "USING LOCAL FILE"
        loco = open("./myloc.txt")
        ippagedata = loco.read()
        loco.close()
    # REGULAR EXPRESSION TO FIND THE NEEDED DATA IN THE WEBPAGE
    # WOULD BE BETTER TO USE SGML TO PARSE THE HTML BUT I DON'T
    # WANT TO FIGURE IT OUT
    # GET THE COUNTRY
    mat = re.search('<li>Country : .*<img',ippagedata)
    locdict['country'] = mat.group(0).split(': ')[1].split('<img')[0].rstrip().lower()
    # GET THE STATE/PROVINCE
    mat = re.search('<li>State/Province : .*</li>',ippagedata)
    locdict['stprov'] = mat.group(0).split(': ')[1].split('</li>')[0].lower()
    # GET THE CITY
    mat = re.search('<li>City : .*</li>',ippagedata)
    locdict['city'] = mat.group(0).split(': ')[1].split('</li>')[0].lower()
    # GET THE ZIP CODE
    mat = re.search('<li>Zip or postal code : .*</li>',ippagedata)
    locdict['zipcode'] = mat.group(0).split(': ')[1].split('</li>')[0]
    # GET THE LATITUDE
    mat = re.search('<li>Latitude : .*</li>',ippagedata)
    locdict['lat'] = mat.group(0).split(': ')[1].split('</li>')[0]
    # GET THE LONGITUDE
    mat = re.search('<li>Longitude : .*</li>',ippagedata)
    locdict['lon'] = mat.group(0).split(': ')[1].split('</li>')[0]
    # RETURN THE LOCATION DICTIONARY
    return locdict

def getWOEID(zipcode):
    """
    getWOEID
    - FUNCTION TO GET THE WOEID ID FROM THE LOCALLY STORED DATABASE
      OR
      PULLS THE DATA FROM THE INTERNET AND STORES INTO THE DATABASE
    - INPUT: zipcode - ZIPCODE FROM LOCATION DATA OR USER INPUT
    - OUPUTS: woeid - THE WOEID WE WANT BASED ON OUR ZIPCODE
    """
    # CHECK TO SEE IF THE ZIP IS IN THE FILE
    haveWOEID = False
    infile = False
    if os.path.isfile(FCASTWOEIDDATA):
        fin = open(FCASTWOEIDDATA,'r+a')
        infile = True
    else:
        # WE HIT THE CASE WHERE THE FILE DOESN'T EXIST
        fin = open(FCASTWOEIDDATA,'w')
        infile = False
    # IF WE HAVE THE INPUT FILE, LOOK FOR OUR ZIPCODE
    if infile:
        for line in fin.readlines():
            if zipcode in line:
                haveWOEID = True
                # GET THE WOEID AND RIGHT STRIP TO REMOVE THE RETURN LINE FROM THE FILE
                woeid = line.split(",")[1].rstrip()
                # NOT RETURNING HERE SO WE CAN GRACEFULLY CLOSE THE INPUT FILE
                # THIS IS A STUPID LITTLE ISSUE
    else:
        haveWOEID = False
    # IF WE DON'T HAVE THE WOEID, LET'S GET THE SHIZZLE AND STORE LOCALLY
    if not haveWOEID:
        # GET THE URL TO FIND WOEID
        yqlquery = 'select woeid from geo.places where text=\"' + zipcode + '\" limit 1'
        yqlformat = "json"
        WOEIDURL = YQLBASEURL + "?q=" + myencode(yqlquery) + "&format=" + yqlformat
        # GET THE DATA FROM THE INTERNETS
        reqresp = urllib.urlopen(WOEIDURL)
        # NEED TO BREAK THE STRING INTO THE APPROPRIATE DICTIONARY STRUCTURE
        resp = ast.literal_eval(reqresp.read())
        reqresp.close()
        # SEARCH THE RESPONSE FOR THE WOEID
        # JSON IS AWESOME! YAY FOR NESTED DICTIONARIES
        woeid = resp['query']['results']['place']['woeid']
        # WRITE THE DATA TO THE FILE
        fmt = "%s,%s\n" % (zipcode, woeid)
        fin.write(fmt)
    # NOW WE CAN CLOSE THE FILE
    fin.close()
    # RETURN THE WOEID WE FOUND
    return woeid

def makeUrl(utype,locdict,woeid,units):
    """
    makeUrl
    - FUNCTION TO CREATE THE URL FOR EITHER THE YAHOO WEATHER RSS
      OR THE 5-DAY FORECAST
    - INPUT: utype - TYPE OF URL: RSS OR 5DAY
             locdict - ZIPCODE FROM LOCATION DATA OR USER INPUT
             woeid - THE WOEID WE WANT BASED ON OUR ZIPCODE
             units - UNITS OF MEASURE
    - OUPUTS: url - THE URL FOR THE PAGE WE WANT TO PARSE
    """
    if utype == "RSS":
        url = "http://weather.yahooapis.com/forecastrss?w="+woeid
        if units == "C":
            url += "&u=c"
    elif utype == "5DAY":
        # FORMAT THE COUNTRY PROPERLY
        tmp = locdict['country'].split(" ")
        country = ""
        for i in xrange(len(tmp)):
            country += tmp[i]
            if i < len(tmp)-1:
                country += "-"
        # FORMAT THE STATE/PROVINCE CORRECTLY
        tmp = locdict['stprov'].split(" ")
        stprov = ""
        for i in xrange(len(tmp)):
            stprov += tmp[i]
            if i < len(tmp)-1:
                stprov += "-"
        # FORMAT THE CITY PROPERLY
        tmp = locdict['city'].split(" ")
        city = ""
        for i in xrange(len(tmp)):
            city += tmp[i]
            if i < len(tmp)-1:
                city += "-"
        # GENERATE URL
        url = "http://weather.yahoo.com/" + country + "/" + stprov + "/" + city + "-" + woeid + "/"
        if units == "C":
            url += "&unit=c"
        if DEBUG:
            print country
            print stprov
            print city
            print url
    # RETURN THE URL
    return url

def getForecast(url,fcastopt):
    """
    getForecast()
    - GETS THE FORECAST DATA
    - INPUT: url - URL OF WEATHER PAGE
             fcastopt - FORECAST TYPE
    - OUPUTS: NONE
    """
    # FUNCTION VARIABLES
    days = []
    fcast = []
    highs = []
    lows = []
    fcastdata = ""
    # FOR DEPLOYMENT
    if not DEBUG:
        urld = urllib.urlopen(url)
        urldata = urld.read()
    else:
        # DEBUG
        urld = open("./lincolntest.txt")
        urldata = urld.read()
    urld.close()
    #print urldata
    if fcastopt.detailed:
        # GET THE DETAILED FORECAST SECTION
        mat = re.search('<div id="yw-detailedforecast">.*</div>.*<div> <!-- SpaceID=0',urldata,re.DOTALL)
        fcastdata = mat.group(0)
        # SPLIT THE LIST UP BY THE HTML <li> TAGS
        fcastdata = fcastdata.split('<li>')
        # REMOVE THE FIRST ITEM IN THE LIST AS IT ISN'T DATA
        fcastdata.pop(0)
        # LOOP THROUGH THE LIST AND BREAK UP THE LIST ITEMS
        for item in fcastdata:
            tmp = item.split('\n')
            # GET THE DAY
            days.append(tmp[0].split('<strong>')[1].split('</strong> ')[0])
            # GET THE FORECAST
            fcast.append(tmp[1])
        # PRINT OUT THE FORECAST
        for i in xrange(len(days)):
            temp = string.capwords(days[i])
            if fcastopt.underline:
                print "\033[4m" + temp + "\033[0m"
            else:
                print temp
            print " " + fcast[i] + "\n"
    elif fcastopt.highlow:
        # SEARCH FOR THE 5 DAY
        mat = re.search('<div id="yw-fivedayforecast">.*<td rowspan="2" class="extended"><!-- SpaceID=0 robot -->',urldata,re.DOTALL)
        fcastdata = mat.group(0).split('<tr')
        # REMOVE THE FIRST LIST ROW
        fcastdata.pop(0)
        # GET THE DAYS
        daysect = fcastdata[0].split('<th>')
        daysect.pop(0)
        for d in daysect:
            days.append(d.split('</th')[0])
        # GET THE AREA WITH THE FORECAST
        fcastsect = fcastdata[1].split('<br/>')
        fcastsect.pop(0)
        for f in fcastsect:
            if "-" in f:
                tmp2 = f.split('-\n')
                tmp3 = ""
                for charsect in tmp2:
                    tmp3 += charsect
                fcast.append(tmp3.split('</div>')[0])
            else:
                fcast.append(f.split('</div>')[0])
        # GET THE AREA WITH THE HIGHS/LOWS
        mat = re.search('<tr class="fiveday-temps">.*</td></tr></table></div>',urldata,re.DOTALL)
        hlsect = mat.group(0).split('<td>')
        hlsect.pop(0)
        for hl in hlsect:
            tmp = hl.split(": ")
            highs.append(int(tmp[1].split("&#")[0]))
            lows.append(int(tmp[2].split("&#")[0]))
        # PRINT OUT THE FORECAST
        for i in xrange(len(days)):
            if fcastopt.underline:
                print "\033[4m%s\033[0m:\n %s\n H: %3d%s L: %3d%s\n" % (days[i],fcast[i],highs[i],UNITS,lows[i],UNITS)
            else:
                print "%s:\n %s\n H: %3d%s L: %3d%s\n" % (days[i],fcast[i],highs[i],UNITS,lows[i],UNITS)

def cmdLineOptionParser():
    """
    cmdLineOptionParser()
    - PARSES THE COMMAND LINE ARGUMENTS
    - INPUT: NONE
    - OUPUTS: NONE
    """
    # CREATE OUR USAGE REPSONSE
    usage = ("%prog [options]",__doc__)
    usage = "\n".join(usage)
    # CREATE OUR COMMAND LINE PARSER
    cmdparser = optparse.OptionParser(usage)
    # ADD OPTIONS
    cmdparser.add_option('-d', '--detailed', action='store_true',
                         help="Displays the detailed forecast",
                         default=False
                         )
    cmdparser.add_option('-l', '--highlow', action='store_true',
                         help="Displays the Highs and Lows only forecast",
                         default=False
                         )
    cmdparser.add_option('-u', '--underline', action='store_true',
                         help="Underlines the day headers",
                         default=False
                         )
    cmdparser.add_option('-w', '--webaddress', action='store', type='string',
                         help="Used for pre-specifying the Yahoo Weather URL instead of automatically grabbing based on external IP",
                         default=''
                         )
    # RETURN THE PARSER
    return cmdparser

def forecastMain(argv):
    """
    forecastMain()
    - MAIN SCRIPT FUNCTION THAT ORGANIZES THE TASKS
    - INPUT: args - COMMAND LINE ARGUMENTS
    - OUPUTS: NONE
    """
    # FIGURE OUT COMMAND LINE ARGUMENTS
    # FCASTTYPE OF -l IS FOR 5 DAY HIGHS/LOWS
    # FCASTTYPE OF -d IS FOR 5 DAY DETAILED
    cmdparser = cmdLineOptionParser()
    opts, args = cmdparser.parse_args(argv)
    # DETERMINE IF WE ARE USING A PROVIDED URL
    if opts.webaddress == '':
        # GET OUR LOCATION DICTIONARY
        locdict = getLocationData()
        # GET OUR WOEID
        mywoeid = getWOEID(locdict['zipcode'])
        # GET THE APPROPRIATE URL WE REQUIRE
        url = makeUrl(URLTYPE,locdict,mywoeid,UNITS)
    else:
        url = opts.webaddress
    # GET THE FORECAST
    getForecast(url,opts)

if __name__ == "__main__":
    forecastMain(sys.argv[1:])
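Once it's saved, it's worth running the script once straight from Terminal before wiring it into GeekTool, just to check that the automatic location lookup works from where you are. The path below is only a placeholder for wherever you actually put the file:
Code:
python /path/to/script/fivedayforecastv3.py -d
If that spits out a forecast, the same command will work in a geeklet.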
Here are the command line options:
Code:
$ python fivedayforecastv3.py -h
Usage: fivedayforecastv3.py [options]
FIVEDAYFORCAST
ROBERT WOLTERMAN (xtacocorex) - 2011
GRABS THE 5 DAY WEATHER FORECAST FROM YAHOO WEATHER FOR PRINTING IN GEEKTOOL
AWESOMENESS FLOWS FROM HERE, THERE ARE NO FILES DOWNLOADED FROM THE INTERNET
THE ONLY FILE NEEDED IS A ZIPCODE TO WOEID MAPPING TO KEEP THE YAHOO QUERIES
TO A MINIMUM (1000 HITS/HOUR LIMIT)
*** MAKE SURE TO KEEP THE REFRESH RATE IN GEEKTOOL/NERDTOOL TO A SLOWER VALUE
THANKS TO brenm666 AND dotcommer ON THE MACRUMORS FORUMS FOR BETA TESTING THIS

Options:
  -h, --help            show this help message and exit
  -d, --detailed        Displays the detailed forecast
  -l, --highlow         Displays the Highs and Lows only forecast
  -u, --underline       Underlines the day headers
  -w WEBADDRESS, --webaddress=WEBADDRESS
                        Used for pre-specifying the Yahoo Weather URL instead
                        of automatically grabbing based on external IP
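One option worth calling out is -w: if the lookup based on your external IP gets your location wrong, you can hand the script your own Yahoo Weather page address and skip the lookup entirely. The URL below is only a placeholder, following the same country/state/city-WOEID pattern the script builds for itself:
Code:
python /path/to/script/fivedayforecastv3.py -d -u -w "http://weather.yahoo.com/country/state/city-WOEID/"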
Since you want a detailed forecast, this is what you're going to type for the command in a shell geeklet:
Code:
python /path/to/script/fivedayforecastv3.py -d -u
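If you'd rather have the compact version with just the highs and lows, swap -d for -l:
Code:
python /path/to/script/fivedayforecastv3.py -l -u
Either way, keep the geeklet's refresh rate fairly slow, like the header in the script says, so you stay well under Yahoo's 1000 queries per hour limit.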