Only get the last result when crawling using sqlmapapi #2503

Open

walio opened this issue Apr 26, 2017 · 1 comment


walio commented Apr 26, 2017

System information

Python version: 2.7.13
Operating system: Windows 7
sqlmap version: 1.1.4.48#dev

Related webpages:

intro.php

<a href="test.php?username="></a>
<a href="test2.php?username="></a>

test.php and test2.php are identical:

<?php
    $conn = @mysql_connect("localhost", 'root', '') or die("connect failed!");
    mysql_select_db("test", $conn) or die("db not exist!");
    $name = $_GET['username'];
    $query = mysql_query("select * from users where username='$name'");
    $arr = mysql_fetch_array($query);
    if (is_array($arr)) {
        echo "success";
    } else {
        echo "failed";
    }
?>
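For reproduction, both pages assume a MySQL database test with a users table that has a username column. A minimal seeding sketch (pymysql is just one possible client and the exact column layout is an assumption):

# Sketch only: seeds the assumed test.users table that the PHP pages query.
# pymysql is an arbitrary client choice; the schema is an assumption.
import pymysql

conn = pymysql.connect(host="localhost", user="root", password="", database="test")
try:
    with conn.cursor() as cur:
        cur.execute("CREATE TABLE IF NOT EXISTS users ("
                    "id INT AUTO_INCREMENT PRIMARY KEY, "
                    "username VARCHAR(64))")
        cur.execute("INSERT INTO users (username) VALUES (%s)", ("admin",))
    conn.commit()
finally:
    conn.close()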

Issue detail

I run python sqlmapapi.py -s to start the API server, and use the following script to crawl the target and detect injection:

#!/usr/bin/python
import requests
import time
import json
from pprint import pprint

class AutoSqli(object):

    def __init__(self, server='', target='', data='', referer='', cookie=''):
        super(AutoSqli, self).__init__()
        self.server = server
        if self.server[-1] != '/':
            self.server = self.server + '/'
        self.target = target
        self.taskid = ''
        self.engineid = ''
        self.status = ''
        self.data = data
        self.referer = referer
        self.cookie = cookie
        self.start_time = time.time()

    def task_new(self):
        self.taskid = json.loads(requests.get(self.server + 'task/new').text)['taskid']

    def task_delete(self):
        if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
            return True
        return False

    def scan_start(self):
        headers = {'Content-Type': 'application/json'}
        payload = {'url': self.target}
        url = self.server + 'scan/' + self.taskid + '/start'
        t = json.loads(requests.post(url, data=json.dumps(payload), headers=headers).text)
        self.engineid = t['engineid']

    def scan_status(self):
        self.status = json.loads(
            requests.get(self.server + 'scan/' + self.taskid + '/status').text)['status']
        if self.status == 'running':
            return 'running'
        elif self.status == 'terminated':
            return 'terminated'
        else:
            return 'error'

    def scan_data(self):
        self.data = json.loads(requests.get(self.server + 'scan/' + self.taskid + '/data').text)['data']

    def option_set(self):
        headers = {'Content-Type': 'application/json'}
        option = {
                    "smart": True,
                    "crawlDepth" : 1
                 }
        url = self.server + 'option/' + self.taskid + '/set'
        requests.post(url, data=json.dumps(option), headers=headers)

    def scan_stop(self):
        requests.get(self.server + 'scan/' + self.taskid + '/stop')         

    def scan_kill(self):
        requests.get(self.server + 'scan/' + self.taskid + '/kill')
        
    def run(self):
        self.task_new()
        self.option_set()
        self.scan_start()
        while True:
            status = self.scan_status()
            if status == 'running':
                time.sleep(1)
            else:
                # 'terminated' or 'error': stop polling either way
                break
            if time.time() - self.start_time > 3000:
                # give up after 50 minutes
                self.scan_stop()
                self.scan_kill()
                break
        self.scan_data()
        self.task_delete()
        for result in self.data:
            if result['type'] == 0:
                pprint(result)

if __name__ == '__main__':
    t = AutoSqli('http://127.0.0.1:8775', 'http://localhost/intro.php')
    t.run()

The script starts a new scan from intro.php with crawl depth 1. Since test.php and test2.php are identical, I expect results for both, but it only returns the result for test2.php:

{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test2.php'}}

Suggested solution

When I comment out the if block in the write method of class StdDbOut in /lib/utils/api.py, lines 241 to 244:

if len(output) > 0:
    for index in xrange(len(output)):
        conf.databaseCursor.execute("DELETE FROM data WHERE id = ?",
                                    (output[index][0],))

I found it returns the results I expect:

{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test.php'}}
{u'status': 1,
 u'type': 0,
 u'value': {u'data': None,
            u'query': u'username=',
            u'url': u'http://localhost:80/test2.php'}}

I suspect this is not a proper fix, but it has worked so far. I hope the problem can be solved properly.
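A less invasive variant might keep the clean-up but scope it to the current target, so results from different crawled URLs do not overwrite each other. A rough sketch of the idea (the layout of the data table and the availability of the current target via conf.url are assumptions about sqlmap internals, not verified):

# Sketch only: instead of deleting every previous row of the task,
# delete only rows whose stored value refers to the same target URL.
# The value column and conf.url are assumptions about api.py internals.
output = conf.databaseCursor.execute(
    "SELECT id, value FROM data WHERE taskid = ?", (self.taskid,))
for row in output:
    if conf.url and conf.url in row[1]:
        conf.databaseCursor.execute("DELETE FROM data WHERE id = ?", (row[0],))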

