@zunayed-arp
Forked from salayhin/jira_analytics.py
Created September 10, 2018 20:40

Revisions

  1. @salayhin salayhin revised this gist Sep 10, 2018. 1 changed file with 2 additions and 2 deletions.

     4 changes: 2 additions & 2 deletions jira_analytics.py

     @@ -13,7 +13,7 @@ def __init__(self, name, start_date, end_date):
               self.name = name
               self.start_date = start_date
               self.end_date = end_date
     -        server_url = "https://augmedix.atlassian.net"
     +        server_url = "https://company_name.atlassian.net"
               self.jira_server = {'server': server_url}

               self.jira_user = "******"
     @@ -98,7 +98,7 @@ def import_data(self):
               data = data.loc[(pd.to_datetime(data['logged_date'].dt.date) >= self.start_date) & (pd.to_datetime(data['logged_date'].dt.date) <= self.end_date)]
               data = data.sort_values(by=['logged_date'])
               groupby_data = data.groupby(['jira_key'])['logged_time'].agg('sum')
     -        data.to_csv("/home/salayhin/Desktop/"+self.name +".csv", encoding='utf-8', index=False)
     +        data.to_csv("/path/to/file/"+self.name +".csv", encoding='utf-8', index=False)
               print("Data import into csv done...")


  2. @salayhin salayhin created this gist Mar 2, 2018.
    112 changes: 112 additions & 0 deletions jira_analytics.py
    @@ -0,0 +1,112 @@
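    # Pulls the Jira issues a given user logged work on within a date range and
    # exports the individual worklog entries to a CSV file.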
    from jira import JIRA
    import pandas as pd
    from collections import defaultdict
    import datetime


    class ImportJiraAnalyticsData:

        def __init__(self, name, start_date, end_date):
            self.name = name
            self.start_date = start_date
            self.end_date = end_date
            server_url = "https://augmedix.atlassian.net"
            self.jira_server = {'server': server_url}

            self.jira_user = "******"
            self.jira_password = "******"
            self.jira = self.jira_obj()

        def jira_obj(self):
            return JIRA(options=self.jira_server, basic_auth=(self.jira_user, self.jira_password))
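
        # Note: for Atlassian Cloud, the second element of basic_auth is typically an
        # API token rather than the account password, e.g.
        #   JIRA(options=self.jira_server, basic_auth=("user@example.com", "<api token>"))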

        def get_work_logs_form_jira(self, issue_number):
            return self.jira.worklogs(issue_number)

        def get_worked_issues(self):
            jql = ('worklogAuthor = "' + self.name + '" '
                   + 'AND worklogDate >= "' + self.start_date + '" '
                   + 'AND worklogDate <= "' + self.end_date + '"')
            return self.jira.search_issues(jql, maxResults=30)
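
        # For illustration (the user name is hypothetical): with name "jdoe" and the
        # date range used in __main__ below, the JQL built above is
        #   worklogAuthor = "jdoe" AND worklogDate >= "2018-02-01" AND worklogDate <= "2018-02-28"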

        def prepare_data(self):
            issues = self.get_worked_issues()

            data = defaultdict(list)
            jira_data = None

            for issue in issues:
                jira_id = issue.raw.get('id')
                jira_key = issue.raw.get('key')
                api_reponse_url = issue.raw.get('self')
                status = issue.raw.get('fields').get('status').get('name')
                priority = issue.raw.get('fields').get('priority').get('name')
                title = issue.raw.get('fields').get('summary')
                # description = issue.raw.get('fields').get('description')
                description = ""

                assignee = issue.raw.get('fields').get('assignee').get('displayName')
                reporter = issue.raw.get('fields').get('reporter').get('displayName')

                # Convert the estimate from seconds to hours; 'timeestimate' may be missing
                time_estimate_sec = issue.raw.get('fields').get('timeestimate')
                time_estimate = (time_estimate_sec / 3600) if time_estimate_sec else None
                timespent = issue.raw.get('fields').get('timespent')

                due_date = issue.raw.get('fields').get('duedate')
                jira_created_date = datetime.datetime.strptime((issue.raw.get('fields').get('created').split('.')[0].replace('T', " ")), '%Y-%m-%d %H:%M:%S')
                last_updated = issue.raw.get('fields').get('updated').split('.')[0].replace('T', " ")

                worklogs = self.get_work_logs_form_jira(jira_id)

                # Keep only the worklog entries authored by the requested user
                for worklog in worklogs:
                    if worklog.author.key == self.name:

                        logged_by = worklog.author.displayName
                        logged_date = datetime.datetime.strptime((worklog.created.split('.')[0].replace('T', " ")), '%Y-%m-%d %H:%M:%S')
                        logged_comment = worklog.comment
                        logged_time_in_sec = worklog.timeSpentSeconds

                        data['jira_id'].append(jira_id if jira_id else None)
                        data['jira_key'].append(jira_key if jira_key else None)
                        data['jira_created_date'].append(jira_created_date if jira_created_date else None)
                        data['api_reponse_url'].append(api_reponse_url if api_reponse_url else None)
                        data['status'].append(status if status else None)
                        data['priority'].append(priority if priority else None)
                        data['title'].append(title if title else None)
                        data['description'].append(description if description else None)
                        data['assignee'].append(assignee if assignee else None)
                        data['reporter'].append(reporter if reporter else None)
                        data['time_estimate'].append(time_estimate if time_estimate else None)
                        data['timespent'].append(timespent if timespent else None)
                        data['due_date'].append(due_date if due_date else None)
                        data['last_updated'].append(last_updated if last_updated else None)
                        data['logged_date'].append(logged_date if logged_date else None)
                        data['logged_comment'].append(logged_comment if logged_comment else None)
                        data['logged_by'].append(logged_by if logged_by else None)
                        data['logged_time'].append((logged_time_in_sec / 3600) if logged_time_in_sec else None)

            jira_data = pd.DataFrame(data, columns=['jira_id', 'jira_key', 'jira_created_date', 'api_reponse_url', 'status', 'priority',
                                                    'title', 'description', 'assignee', 'reporter', 'time_estimate', 'timespent',
                                                    'due_date', 'last_updated', 'logged_date', 'logged_comment', 'logged_by',
                                                    'logged_time'])

            return jira_data

        def import_data(self):
            data = self.prepare_data()
            data = data.loc[(pd.to_datetime(data['logged_date'].dt.date) >= self.start_date) & (pd.to_datetime(data['logged_date'].dt.date) <= self.end_date)]
            data = data.sort_values(by=['logged_date'])
            # Per-issue totals in hours; computed here but not written out below
            groupby_data = data.groupby(['jira_key'])['logged_time'].agg('sum')
            data.to_csv("/home/salayhin/Desktop/" + self.name + ".csv", encoding='utf-8', index=False)
            print("Data import into csv done...")


    if __name__ == "__main__":

        name = "jira name goes here"
        start_date = "2018-02-01"
        end_date = "2018-02-28"

        jira = ImportJiraAnalyticsData(name, start_date, end_date)
        jira.import_data()
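
Note that import_data computes per-issue totals in groupby_data but only writes the row-level worklog data to CSV. A minimal sketch of also persisting that summary from inside import_data (the output path and "_summary" filename are illustrative, not from the gist):

            # after groupby_data is computed in import_data
            groupby_data.to_csv("/path/to/file/" + self.name + "_summary.csv",
                                encoding='utf-8', header=['logged_time_hours'])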