aboutsummaryrefslogtreecommitdiff
blob: b43b5852afd1fd4b9aec09c4a55dccb836591b11 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
# -*- coding: utf-8 -*-

"""
    g_octave.fetch
    ~~~~~~~~~~~~~~
    
    This module implements a Python class responsible for fetching and
    updating the package database and the auxiliary files.
    
    Used only by the live version of g-octave.
    
    :copyright: (c) 2009-2010 by Rafael Goncalves Martins
    :license: GPL-2, see LICENSE for more details.
"""

from __future__ import absolute_import, print_function

__all__ = ['fetch']

from .config import Config
conf = Config()

from .description_tree import DescriptionTree
from .exception import FetchException
from .compat import py3k, open as open_

if py3k:
    import urllib.request as urllib
else:
    import urllib2 as urllib

import glob
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile

from contextlib import closing

def clean_db():
    """Remove the current package database entries from ``conf.db``.

    Directories are removed recursively; plain files are unlinked.
    Entries that do not exist are silently skipped.
    """
    entries = ('timestamp', 'info.json', 'patches', 'octave-forge', 'manifest.json')
    for entry in entries:
        path = os.path.join(conf.db, entry)
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.unlink(path)

class GitHub:
    """Fetch backend that retrieves the package database from GitHub.

    A database mirror is configured as ``github://<user>/<repo>/``;
    :attr:`re_db_mirror` is used by :func:`fetch` to recognize this scheme
    and extract the constructor arguments.
    """

    # Matches mirror URIs like 'github://user/repo/'; named groups feed
    # directly into __init__(user=..., repo=...).
    re_db_mirror = re.compile(r'github://(?P<user>[^/]+)/(?P<repo>[^/]+)/?')

    def __init__(self, user, repo):
        self.user = user
        self.repo = repo
        # NOTE(review): the v2 JSON API is long deprecated — confirm these
        # endpoints still respond before relying on this backend.
        self.api_url = 'http://github.com/api/v2/json'
        self.url = 'http://github.com'

    def need_update(self):
        """Return True when no commit id has ever been cached locally."""
        return not os.path.exists(os.path.join(
            conf.db, 'cache', 'commit_id'
        ))

    def get_commits(self, branch='master'):
        """Return the list of commit dicts for *branch* from the GitHub API."""
        url = '%s/commits/list/%s/%s/%s/' % (
            self.api_url,
            self.user,
            self.repo,
            branch
        )
        with closing(urllib.urlopen(url)) as fp:
            commits = json.loads(fp.read().decode('utf-8'))
        return commits['commits']

    def fetch_db(self, branch='master'):
        """Download the repository tarball for *branch* if it changed.

        Returns False when the cached commit id already matches the newest
        remote commit (nothing to do); True otherwise. The commit id cache
        is only updated when the download succeeds.
        """
        cache = os.path.join(conf.db, 'cache')
        commit_id = os.path.join(cache, 'commit_id')
        if not os.path.exists(cache):
            os.makedirs(cache)
        # Fix: query the requested branch; previously this always asked
        # for 'master' regardless of the *branch* argument.
        last_commit = self.get_commits(branch)[0]['id']
        if os.path.exists(commit_id):
            with open_(commit_id) as fp:
                if fp.read().strip() == last_commit:
                    return False
        dest = os.path.join(cache, 'octave-forge-%s.tar.gz' % last_commit)
        return_value = subprocess.call([
            'wget',
            '--continue',
            '--output-document', dest,
            '%s/%s/%s/tarball/%s/' % (
                self.url,
                self.user,
                self.repo,
                branch
            )
        ])
        # 0 is os.EX_OK on POSIX; the literal also works on platforms
        # (e.g. Windows) where os.EX_OK is not defined.
        if return_value == 0:
            with open_(os.path.join(cache, 'commit_id'), 'w') as fp:
                fp.write(last_commit)
        return True

    def extract(self):
        """Extract the cached tarball into ``conf.db``.

        Wipes the old database first, then moves the tarball's single
        top-level directory's contents into ``conf.db``.

        Raises FetchException if the extraction did not produce exactly
        one '<user>-<repo>*' directory. Does nothing when no tarball is
        cached or the cached file is not a valid tar archive.
        """
        clean_db()
        cache = os.path.join(conf.db, 'cache')
        commit_id = os.path.join(cache, 'commit_id')
        tarball = None
        if os.path.exists(commit_id):
            with open_(commit_id) as fp:
                tarball = os.path.join(
                    cache,
                    'octave-forge-%s.tar.gz' % fp.read().strip()
                )
        if tarball is not None:
            if tarfile.is_tarfile(tarball):
                with closing(tarfile.open(tarball, 'r')) as fp:
                    fp.extractall(conf.db)
                dirs = glob.glob('%s/%s-%s*' % (conf.db, self.user, self.repo))
                if len(dirs) != 1:
                    # Fix: an unreachable 'return' used to follow this raise.
                    raise FetchException('Failed to extract the tarball.')
                for f in os.listdir(dirs[0]):
                    shutil.move(os.path.join(dirs[0], f), conf.db)
                os.rmdir(dirs[0])

# TODO: Implement gitweb support

# Registry of available fetch backends; fetch() returns an instance of the
# first one whose re_db_mirror pattern matches the configured db mirror.
__modules__ = [
    GitHub
]

def fetch():
    """Instantiate the fetch backend matching ``conf.db_mirror``.

    Returns the backend instance built from the mirror URI's named groups,
    or None when no registered backend recognizes the mirror.
    """
    for backend in __modules__:
        found = backend.re_db_mirror.match(conf.db_mirror)
        if found is None:
            continue
        return backend(**found.groupdict())