#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTN Hijack Servant
==================
A tool for the HackTheNet_ browser game built to aid in
finding inactive user accounts (whose computers can be
hijacked regardless of the league).
Tested with HTN version 2.5 Beta.
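
Example usage (the file name ``hijack.py`` is only an assumption;
adjust it to whatever this script is saved as)::

    $ python hijack.py --offline 10 --computers <session ID>
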
:Copyright: 2005 Y0Gi
:Date: 12-Dec-2005
:License: GNU General Public License
:Version: 0.3

.. _HackTheNet: http://www.hackthenet.org/
"""

from optparse import OptionParser
import re
import sys
import urllib2

# Configuration
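# The User-Agent below mimics a regular Firefox browser so that requests
# do not stand out as scripted; URL_BASE points at the HTN 2.5 beta
# server, and USERS_PER_PAGE mirrors the pagination of the ranking pages
# (it is used below to detect the last page).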
USER_AGENT = ('Mozilla/5.0 (X11; U; Linux i686; de-DE; rv:1.7.12)'
              ' Gecko/20051010 Firefox/1.0.7')
URL_BASE = 'http://htn25.unkreativ.org/_htn.php'
USERS_PER_PAGE = 10

class User(object):

    def __init__(self, id=0, name='', points=0, rank=0):
        self.id = id
        self.name = name
        self.rank = rank
        self.cluster = ''
        self.league = 0
        self.points = points
        self.computers = []

    def __str__(self):
        # Interpolate the simple fields first; cluster and computer
        # count are filled in by the second interpolation below.
        text = ('%(name)s [ID %(id)d, %%s, league %(league)d,'
                ' %(points)s points, %%d computers]') % self.__dict__
        return text % (self.cluster or 'no cluster', len(self.computers))

class Computer(object):

    def __init__(self, ip_address='10.x.x.x', name=''):
        self.ip_address = ip_address
        self.name = name

    def __str__(self):
        return '%(ip_address)s (%(name)s)' % self.__dict__

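# For illustration, the string representations above render like this
# (all values are made up):
#
#   >>> user = User(42, 'foo')
#   >>> user.computers.append(Computer('10.0.0.1', 'box'))
#   >>> print user
#   foo [ID 42, no cluster, league 0, 0 points, 1 computers]
#   >>> print user.computers[0]
#   10.0.0.1 (box)
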
_debug = False

def debug(msg):
    if _debug:
        print msg

_logfile = None

def log(msg):
    print msg
    if _logfile is not None:
        _logfile.write(msg + '\n')
        _logfile.flush()

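# debug() only prints once --debug has set the module-level _debug flag,
# while log() always prints and additionally tees each line into the
# file opened for --file, if any.
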
def main():
    # The module-level flags are assigned below, hence the declaration.
    global _debug, _logfile

    print 'HTN Hijack Servant by Y0Gi\n'

    # Create option parser and define options.
    parser = OptionParser(
        usage='%prog [options] <session ID>',
        version='HTN Hijack Servant 0.3',
        description='HTN Hijack Servant was built to aid in'
                    ' finding inactive user accounts (whose'
                    ' computers can be hijacked regardless of'
                    ' the league).')
    parser.add_option('-d', '--debug',
                      action='store_true', dest='debug', default=False,
                      help='show detailed output')
    parser.add_option('-f', '--file',
                      dest='logfile',
                      help='log targets to FILE', metavar='FILE')
    parser.add_option('-b', '--begin',
                      type='int', dest='begin', default=1,
                      help='specify the rank to begin with (default: 1)',
                      metavar='RANK')
    parser.add_option('-o', '--offline',
                      type='int', dest='min_days_offline', default=6,
                      help='specify minimum number of days a user must be'
                           ' offline (default: 6)',
                      metavar='DAYS')
    parser.add_option('-c', '--computers',
                      action='store_true', dest='list_computers',
                      default=False, help='list computers')
    parser.add_option('-s', '--single',
                      action='store_true', dest='single_computer_only',
                      default=False, help='show only users with one computer')

    # Process options and arguments.
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.print_help()
        parser.exit()
    session_id = args[0]
    if options.debug:
        _debug = True
        print 'Debugging output on.'
    if options.logfile:
        _logfile = open(options.logfile, 'w')
        print 'Logging to file %s' % options.logfile
    print

    # Set URLs.
    url_users_page = ('%s/ranking/user?sid=%s&league=0&start='
                      % (URL_BASE, session_id))
    url_user_profile = ('%s/user/info?sid=%s&id='
                        % (URL_BASE, session_id))

    # Precompile regular expression patterns.
    re_user = re.compile(r'/user/info\?.*?id=([0-9]+?)">(.+?)</a>')
    re_league = re.compile(r'<th>League:</th><td>([1-3])</td>')
    re_points = re.compile(r'<th>Punkte:</th><td>([0-9\.]+)</td>')
    re_cluster = re.compile(r'<th>Cluster:</th><td><a href=".+?">(.+?)</a></td>')
    # TODO: Extract computer count via regex?
    re_computer = re.compile(r'<li>(10\.\d+\.\d+\.\d+) \((.+?)\)')
    re_offline = re.compile(r'Offline seit: (\d+) Tage, (\d\d:\d\d:\d\d) Stunden')
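    # Note that the HTN pages mix English and German: "Punkte" means
    # "points", and the offline pattern matches a fragment like
    # "Offline seit: 3 Tage, 04:05:06 Stunden" ("offline since 3 days,
    # 04:05:06 hours"), capturing the day count and the hh:mm:ss part.
    # The sample fragment is reconstructed from the pattern itself, not
    # copied from a live page.
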
    # Customize the URL opener.
    opener = urllib2.build_opener()
    opener.addheaders = [('User-Agent', USER_AGENT), ('Referer', URL_BASE)]
    urllib2.install_opener(opener)
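    # install_opener() installs the opener globally, so every plain
    # urllib2.urlopen() call below automatically sends the custom
    # User-Agent and Referer headers.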

    # Main loop
    offset = options.begin
    while True:
        try:
            # Fetch users page.
            try:
                url = url_users_page + str(offset)
                data = urllib2.urlopen(url).read()
            except urllib2.URLError, e:
                sys.exit('Could not get user page: ' + str(e))

            # Extract users from that page.
            users = []
            for id, name in re_user.findall(data):
                users.append(User(int(id), name))
            debug('--- Found %d users (began with rank #%d) ---'
                  % (len(users), offset))

            # Retrieve user details.
            for user in users:
                debug('Retrieving details for user %s [%d] ...'
                      % (user.name, user.id))
                try:
                    url = url_user_profile + str(user.id)
                    data = urllib2.urlopen(url).read()
                except urllib2.URLError, e:
                    print 'Error retrieving details for user #%d:' % user.id, e
                    continue
                except KeyboardInterrupt:
                    # Catch Ctrl-C here, too, or it won't abort.
                    raise
                except:
                    # Incompletely sent documents trigger this error.
                    # A ``ValueError`` is raised, but ``except ValueError:``
                    # doesn't seem to catch it. Strange.
                    print 'The document seems to be incomplete, skipping.'
                    continue

                # Extract details.
                try:
                    user.league = int(re_league.search(data).group(1))
                except (AttributeError, ValueError):
                    pass
                try:
                    user.points = re_points.search(data).group(1)
                except AttributeError:
                    pass
                try:
                    user.cluster = re_cluster.search(data).group(1)
                except AttributeError:
                    pass

                # Check status.
                m = re_offline.search(data)
                if not m:
                    debug(' User seems to be online, skipping.')
                    continue
                days_offline = int(m.group(1))
                if days_offline < options.min_days_offline:
                    debug(' User has not been offline long enough, skipping.')
                    continue

                # Fetch computers.
                for ip_address, name in re_computer.findall(data):
                    user.computers.append(Computer(ip_address, name))
                if options.single_computer_only and len(user.computers) != 1:
                    debug(' User has multiple computers, skipping.')
                    continue

                # Print target details.
                log('%s has been offline for %s days, %s hours'
                    % (user, days_offline, m.group(2)))
                if options.list_computers:
                    for computer in user.computers:
                        log(' * ' + str(computer))
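            # The ranking URL's `start` parameter appears to be rank-based
            # (see --begin), so stepping by USERS_PER_PAGE below advances
            # exactly one page of results.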
            # Break if last page reached.
            if len(users) < USERS_PER_PAGE:
                log('Seems to be the last page, stopping.')
                break
            offset += USERS_PER_PAGE
        except KeyboardInterrupt:
            print 'Aborting ...'
            break

    if _logfile is not None:
        _logfile.close()


if __name__ == '__main__':
    main()