Adding files to BitBucket repository using BitBucket Server REST API - bitbucket-server

Every time a new file is added to my server, I want that file to be added to my Bitbucket Server repository. The server containing the files is not a git working copy, and I want to know if this is possible programmatically using the Bitbucket Server REST API. Is it possible to do a remote commit and push to a certain branch using the REST API? I took a look at the documentation and couldn't figure out which endpoint covers this scenario. Any help would be appreciated.

Here is a snippet of Python which uses the Bitbucket REST API:
#!/usr/bin/python
import os
import tempfile
import sys
import urllib2
import json
import base64
import logging
import re
import pprint
import requests
import subprocess

projectKey = "FW"
repoKey = "fw"
branch = "master"
pathToVersionProperties = "core/CruiseControl/CI_version.properties"
localVersionProperties = "CI_version.properties"
bitbucketBaseUrl = "https://bitbucket.company.com/rest/api/latest"

logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s')

def checkPersonalAccessToken():
    try:
        os.environ["PAT"]
        logging.info("Detected Personal Access Token")
    except KeyError:
        logging.error("Personal Access Token: $PAT env variable not set, update Jenkins master with correct environment variable")
        sys.exit(1)

def getJenkinsPropertiesFile():
    # Fetch the raw contents of the properties file from Bitbucket
    restEndpoint = "{}/projects/{}/repos/{}/raw/{}".format(bitbucketBaseUrl, projectKey, repoKey, pathToVersionProperties)
    logging.info("REST endpoint : {}".format(restEndpoint))
    request = urllib2.Request(restEndpoint)
    request.add_header("Authorization", "Bearer %s" % os.environ["PAT"])
    return urllib2.urlopen(request).read()

def extractBuildNumber(propertiesString):
    m = re.search(r'BUILD_NUMBER=(\d+)', propertiesString)
    if m:
        logging.info("Current build number: {}".format(m.group(1)))
    else:
        logging.error("Failed to extract build number")
        sys.exit(1)
    return int(m.group(1))

def extractVersion(propertiesString):
    m = re.search(r'\nVERSION=(.*)', propertiesString)
    if m:
        logging.info("Current version: {}".format(m.group(1)))
    else:
        logging.error("Failed to extract version")
        sys.exit(1)
    return m.group(1)

def updateBuildNumber(propertiesString, currentBuild, newBuildNumber):
    buildString = "BUILD_NUMBER=%s" % currentBuild
    newBuildString = "BUILD_NUMBER=%s" % newBuildNumber
    return propertiesString.replace(buildString, newBuildString)

def getLatestCommit():
    # The commits resource returns the history for the given path, newest first
    restEndpoint = "{}/projects/{}/repos/{}/commits?path={}".format(bitbucketBaseUrl, projectKey, repoKey, pathToVersionProperties)
    logging.info("REST endpoint : {}".format(restEndpoint))
    request = urllib2.Request(restEndpoint)
    request.add_header("Authorization", "Bearer %s" % os.environ["PAT"])
    result = json.loads(urllib2.urlopen(request).read())
    latestCommit = result["values"][0]["displayId"]
    if len(latestCommit) > 0:
        logging.info("Latest commit: {}".format(latestCommit))
    else:
        logging.error("Commit hash is empty, failed to retrieve latest commit")
        sys.exit(1)
    return latestCommit

def commitUpdatedBuildNumber(commitId, commitMessage, updatedVersionProperties):
    # A multipart PUT to the browse resource commits new file content on the branch
    restEndpoint = "{}/projects/{}/repos/{}/browse/{}".format(bitbucketBaseUrl, projectKey, repoKey, pathToVersionProperties)
    logging.info("REST endpoint : {}".format(restEndpoint))
    tempFile = tempfile.NamedTemporaryFile(delete=False)
    try:
        with open(tempFile.name, "w") as f:
            f.write(updatedVersionProperties)
    finally:
        tempFile.close()
    # Note: "content=@file" makes curl upload the file's contents
    curlCommand = "curl -s -S -X PUT -H \"Authorization: Bearer %s\" -F content=@%s -F 'message=%s' -F branch=master -F sourceCommitId=%s %s" % (
        os.environ["PAT"], tempFile.name, commitMessage, commitId, restEndpoint)
    FNULL = open(os.devnull, 'w')
    try:
        subprocess.call(curlCommand, shell=True, stdout=FNULL)
        logging.info("Committed version update")
    finally:
        FNULL.close()

def writeCommitNumber(latestCommit):
    revisionFile = "%s/%s/%s" % (os.environ["HOME"], "git_revisions", "FW_11_CI_git.revision")
    logging.info("Revision file: {}".format(revisionFile))
    writeString = "%s=%s\n" % ("VERSION_PROPERTIES_REVISION", latestCommit)
    with open(revisionFile, "w") as f:
        f.write(writeString)

checkPersonalAccessToken()
propertiesString = getJenkinsPropertiesFile()
currentBuildNumber = extractBuildNumber(propertiesString)
currentVersion = extractVersion(propertiesString)
newBuildNumber = currentBuildNumber + 1
newBuild = "%s%s%s" % (currentVersion, 'B', newBuildNumber)
logging.info("New build number: {}".format(newBuild))
updatedPropertiesString = updateBuildNumber(propertiesString, currentBuildNumber, newBuildNumber)
commitMessage = "%s %s" % (newBuild, "Version Update")
latestCommit = getLatestCommit()
commitUpdatedBuildNumber(latestCommit, commitMessage, updatedPropertiesString)
writeCommitNumber(latestCommit)
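The piece that answers the question is the browse endpoint: Bitbucket Server accepts a multipart PUT on /rest/api/latest/projects/{key}/repos/{slug}/browse/{path} to create or update a file on a branch. The script shells out to curl for this, but since it already imports requests, here is a minimal sketch of the same commit in pure Python (the sourceCommitId placeholder must be filled in with the current tip of the file's history):

import os
import requests

url = "https://bitbucket.company.com/rest/api/latest/projects/FW/repos/fw/browse/core/CruiseControl/CI_version.properties"
response = requests.put(
    url,
    headers={"Authorization": "Bearer %s" % os.environ["PAT"]},
    files={
        "content": open("CI_version.properties", "rb"),  # the new file contents
        "message": (None, "Version Update"),             # commit message
        "branch": (None, "master"),
        "sourceCommitId": (None, "<latest commit id>"),  # guards against concurrent edits
    },
)
print(response.status_code)  # 200 on success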

Related

Integration of Locust with Pytest

I am trying to integrate pytest and Locust and am facing this issue: PytestCollectionWarning: cannot collect test class because it has a __init__ constructor
import json
import pytest
import requests
from locust import SequentialTaskSet, task

request_url = ""
module_name = "shifts"

class TestShifts(SequentialTaskSet):
    def get_shifts_data(self, login, request_data):
        global request_url
        name = "Get Shifts Data"
        request_url = login.get_base_url() + request_data['path']
        with self.client.get(request_url, catch_response=True, name=name,
                             headers=login.get_header_get_request()) as get_shift_response:
            if get_shift_response.status_code == 200:
                get_shift_response.success()
                return json.loads(get_shift_response.content)
            else:
                get_shift_response.failure("Get Shifts Failure")

    @pytest.mark.usefixtures("login", "request_data")
    def test_delete_shift(self, login, request_data):
        global request_url
        name = "Delete Shift with name :" + request_data['requestBody']['name']
        shifts_data_dict = self.get_shifts_data(login, request_data)
        for shift in shifts_data_dict['shifts']:
            if shift['name'] == request_data['requestBody']['name']:
                with self.client.delete(request_url + "/" + shift['id'], catch_response=True, name=name,
                                        headers=login.get_header_get_request()) as shift_delete_response:
                    if shift_delete_response.status_code == 204:
                        shift_delete_response.success()
                    else:
                        shift_delete_response.failure("Shift Delete Failure")
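No fix was posted for this one, so for reference: pytest tries to collect any class whose name starts with Test, and it warns here because SequentialTaskSet gives the class an __init__ constructor. A minimal sketch of one common workaround, assuming the Locust flow is driven by Locust itself and only needs to stop tripping pytest's collector, is to opt the class out of collection:

from locust import SequentialTaskSet

class TestShifts(SequentialTaskSet):
    __test__ = False  # pytest honors this attribute and skips collecting the class

    # ... task methods as above ...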

azure pipeline to delete the old azure git branch (not repo)

I am trying to create an Azure pipeline to delete old Azure git branches (not the repo).
The automated pipeline should take the parameters below:
Project Name
Repo Name
Target date
Based on the input provided, all branches created before the target date for the given repo should be deleted.
Note: only child branches will be deleted, never master.
Rules
Branches should only be deleted on the basis of a dry-run flag: if the flag is true, delete all branches in the repo created before the given target date, excluding the master branch.
It's better if we can write the code in Python.
I am using the Azure REST API to list branches, but I am not able to delete them according to the date parameter.
Everything is working except the user input in the Azure pipeline, which I have had to hard-code.
For the user input (input credentials), please reference the sample below:
import requests
import base64

repo_endpoint_url = "https://dev.azure.com/<organization>/<project>/_apis/git/repositories?api-version=5.1"
username = ""  # this can be an arbitrary value, or you can just leave it empty
password = "<your-password-here>"

userpass = username + ":" + password
b64 = base64.b64encode(userpass.encode()).decode()
headers = {"Authorization": "Basic %s" % b64}

response = requests.get(repo_endpoint_url, headers=headers)
print(response.status_code)  # Expect 200
You can also try using a PAT or the OAuth token $env:SYSTEM_ACCESSTOKEN directly (use the PAT or $env:SYSTEM_ACCESSTOKEN in place of the password).
However, to enable your script to use the build pipeline's OAuth token, you need to go to the Options tab of the build pipeline and select Allow Scripts to Access OAuth Token.
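As a minimal sketch of the OAuth-token variant (assuming System.AccessToken has been mapped into the script's environment as SYSTEM_ACCESSTOKEN, which is not automatic for every task type):

import os
import base64
import requests

repo_endpoint_url = "https://dev.azure.com/<organization>/<project>/_apis/git/repositories?api-version=5.1"
token = os.environ["SYSTEM_ACCESSTOKEN"]  # requires "Allow Scripts to Access OAuth Token"
b64 = base64.b64encode((":" + token).encode()).decode()  # empty username, token as password
headers = {"Authorization": "Basic %s" % b64}
response = requests.get(repo_endpoint_url, headers=headers)
print(response.status_code)  # Expect 200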
I am using the Azure REST API to list branches, but I am not able to delete them according to the date parameter.
import requests
import sys
from datetime import datetime as dt
import json
from git import Repo
import git
import time

username = '<u name>'
auth_key = '<auth key>'

class gitRepoDeletion:
    def getRepo(self, organization_name, project_name, repo_name):
        """
        Get the repo's refs from the REST API and write every
        branch except master to data.json for later filtering.
        """
        getting_repo_list = "https://dev.azure.com/" + organization_name + '/' + \
            project_name + "/_apis/git/repositories/" + repo_name + "/refs?api-version=5.0"
        get_response = requests.get(getting_repo_list, auth=(username, auth_key))
        try:
            repojson = json.loads(get_response.content)
        except ValueError:
            print("Error loading json file")
        output_json = [x for x in repojson['value']
                       if x['name'] != 'refs/heads/master']
        with open('/home/vsts/work/1/s/data.json', 'w', encoding='utf-8') as f:
            json.dump(output_json, f, ensure_ascii=False, indent=4)

    def filtering_branches(self, organization_name, project_name, repo_name, user_date):
        """
        Clone the repo, read the last-commit date of every remote
        branch, and keep the names of the branches that are older
        than the date passed by the user.
        """
        git_url = "https://" + organization_name + "@dev.azure.com" + '/' + \
            organization_name + '/' + project_name + '/_git' + '/' + repo_name
        branches = Repo.clone_from(git_url, "./mylocaldir209")
        remote_branches = []
        for ref in branches.git.branch('-r').split('\n'):
            if ref != '  origin/HEAD -> origin/master':
                if ref != '  origin/master':
                    remote_branches.append(ref[9:])

        branch_and_timing_dict = {}
        for listy in remote_branches:
            branches.git.checkout(listy)
            commit = branches.head.commit
            timing = time.strftime(
                "%d/%m/%Y", time.gmtime(commit.committed_date)).replace(' 0', ' ')
            branch_and_timing_dict[listy] = timing

        global filterlist
        filterlist = []
        for branch_name, commit_date in branch_and_timing_dict.items():
            d1 = dt.strptime(user_date, "%d/%m/%Y")
            # Compare the commit date, not the branch name
            d2 = dt.strptime(commit_date, "%d/%m/%Y")
            if d1 > d2:
                # Keep the branch name so repo_delete can build its ref
                filterlist.append(branch_name)
        return filterlist

    def repo_delete(self, organization_name, project_name, repo_name, dry_flag):
        """
        Delete the filtered branches (master excluded) or, on a dry
        run, just write the candidate list to delete_repo.txt.
        """
        all_repo_to_be_deleted = []
        newObjectId = "0000000000000000000000000000000000000000"
        filteredBranchesAsPerDateWithRef = []
        for value in filterlist:
            filteredBranchesAsPerDateWithRef.append("refs/heads/" + value)
            print(value)
        print(filteredBranchesAsPerDateWithRef)
        # Read data.json, which getRepo() wrote after excluding master,
        # and remember each branch's current objectId for the delete call
        branch_object_ids = {}
        with open('/home/vsts/work/1/s/data.json') as data_file:
            json_data = json.load(data_file)
        for item in json_data:
            name_of_branch = item['name']
            branch_object_ids[name_of_branch] = item['objectId']
            all_repo_to_be_deleted.append(name_of_branch)
        passing_branch_name = "https://dev.azure.com/" + organization_name + '/' + \
            project_name + "/_apis/git/repositories/" + repo_name + "/refs?api-version=5.0"
        headers = {'Content-type': 'application/json'}
        # Strip extra spaces and lower the case of the dry flag passed by the user
        dry_flag = dry_flag.lower().strip()
        for nameOfBranchWithref in filteredBranchesAsPerDateWithRef:
            print(nameOfBranchWithref)
            data = [
                {
                    "name": nameOfBranchWithref,
                    "newObjectId": newObjectId,
                    # Deleting a ref requires its current objectId
                    "oldObjectId": branch_object_ids.get(nameOfBranchWithref),
                }
            ]
            if dry_flag == 'true':
                repo_delete = requests.post(passing_branch_name, data=json.dumps(
                    data), headers=headers, auth=(username, auth_key))
                print(repo_delete)
            else:
                with open('delete_repo.txt', 'w') as d:
                    for item in all_repo_to_be_deleted:
                        d.write("%s\n" % item)
                print("---- This is Dry Run ----")
                print("These are the branches to be deleted: ", all_repo_to_be_deleted)

if __name__ == "__main__":
    gitRepoDeletion().getRepo('sushmasureshyadav202', 'my_delete_git', 'my_delete_git')
    gitRepoDeletion().filtering_branches(
        "<azure org name>", '<azure project>', '<azure repo>', "31/1/2020")
    gitRepoDeletion().repo_delete("<azure org name>", '<azure project>', '<azure repo>', 'true')
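Stripped down to the essential call: deleting a branch through the Azure DevOps REST API is just an update of its ref to the all-zero object id, which is the same POST the script above makes. A minimal sketch with placeholder values (the branch name and object id here are hypothetical):

import json
import requests

url = ("https://dev.azure.com/<organization>/<project>/_apis/git/"
       "repositories/<repo>/refs?api-version=5.0")
data = [{
    "name": "refs/heads/old-feature",                          # hypothetical branch
    "oldObjectId": "<current tip commit id of the branch>",    # must match the tip
    "newObjectId": "0000000000000000000000000000000000000000"  # all zeros = delete
}]
response = requests.post(url, data=json.dumps(data),
                         headers={"Content-type": "application/json"},
                         auth=("<username>", "<PAT>"))
print(response.status_code, response.text)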

McAfee Update download script

I'm setting up PCs with McAfee installed on them, and I've been told I need to stop the program from going online to download updates (DAT files). I need to create a script that downloads the DAT file from the McAfee web site and puts it on a server where McAfee can access and install it.
Has anyone done this in the past?
I actually have done this. I haven't tested this script in a year or two, but here is what I was using. It isn't written in PowerShell, but if you change the directories I think it can run on Windows.
#!/usr/bin/python
import ftplib
import tarfile
import shutil
import os
import re
import time

scannerDir = "/usr/local/uvscan/"
tmp = "/tmp/avscanner/"

def downloadDat():
    datFile = ""
    r = re.compile("^avvdat")
    ftp = ftplib.FTP("ftp.nai.com", "anonymous", "email@yourdomain.com")
    ftp.cwd("/pub/datfiles/english")
    listing = ftp.nlst()
    for x in listing:
        if r.search(x):
            datFile = x
    f = open(tmp + "datfile", 'wb')
    ftp.retrbinary("RETR " + datFile, f.write)
    f.close()
    ftp.quit()

def unpackDat():
    tFile = tarfile.open(tmp + "datfile", 'r')
    for f in tFile.getnames():
        tFile.extract(f, tmp)

def createDirs():
    if not os.path.isdir(tmp):
        os.mkdir(tmp, 0700)
        os.chown(tmp, 0, 95)
        os.chmod(tmp, 0755)

def doCleanup():
    shutil.rmtree(tmp)

def installFiles():
    shutil.copyfile(tmp + "/avvclean.dat", scannerDir + "/avvclean.dat")
    shutil.copyfile(tmp + "/avvnames.dat", scannerDir + "/avvnames.dat")
    shutil.copyfile(tmp + "/avvscan.dat", scannerDir + "/avvscan.dat")

def isOld():
    # Download only when the installed DATs are missing or older than
    # 80000 seconds (roughly 22 hours)
    if os.path.isfile(scannerDir + "/avvclean.dat"):
        return time.time() - os.path.getctime(scannerDir + "/avvclean.dat") > 80000
    return True

def main():
    if isOld():
        createDirs()
        downloadDat()
        unpackDat()
        installFiles()
        doCleanup()

if __name__ == "__main__":
    main()

Push notification from python to iPhone, how to debug?

How does one debug the connection from a provider to the Apple push notification server?
I'm using a library called PyAPNs (github repo) and have the code below:
from apns import APNs, Payload
print "start"
apns = APNs(use_sandbox=True, cert_file='apns-prod.pem', key_file='apns-prod.pem')
# Send a notification
token_hex = '*******'
payload = Payload(alert="Hello World!", sound="default", badge=1)
apns.gateway_server.send_notification(token_hex, payload)
# Get feedback messages
for (token_hex, fail_time) in apns.feedback_server.items():
print token_hex
print fail_time
print "end"
The application is registered to receive remote notifications and everything looks okay under the notification settings on the iPhone, but no notifications show up.
My question here is how I can debug this. When running the script I don't get any errors, and apns.feedback_server.items() is empty. I've tried to print the buffer from the feedback server, but nothing.
Is there a way to see what's happening in the SSL socket, or to get some response from Apple's servers?
..fredrik
EDIT
I solved the problem. The issue was with the token_hex: I had used the identifier number from the Xcode organizer instead of the token generated when registering the application.
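For anyone hitting the same thing: the token the device logs in didRegisterForRemoteNotificationsWithDeviceToken comes wrapped in angle brackets with spaces, while PyAPNs expects plain hex. A minimal sketch of the cleanup (the raw token below is a made-up placeholder):

raw_token = "<74d24aa5 1f95b0a3 ...>"  # hypothetical; use the token your app logs
token_hex = raw_token.strip("<> ").replace(" ", "")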
USE THIS CODE:
#!/usr/bin/python2.7
import socket
import ssl
import json
import struct
import argparse

APNS_HOST = ('gateway.sandbox.push.apple.com', 2195)

class Payload:
    PAYLOAD = '{"aps":{${MESSAGE}${BADGE}${SOUND}}}'

    def __init__(self):
        pass

    def set_message(self, msg):
        if msg is None:
            self.PAYLOAD = self.PAYLOAD.replace('${MESSAGE}', '')
        else:
            self.PAYLOAD = self.PAYLOAD.replace('${MESSAGE}', '"alert":"%s",' % msg)

    def set_badge(self, num):
        if num is None:
            self.PAYLOAD = self.PAYLOAD.replace('${BADGE}', '')
        else:
            self.PAYLOAD = self.PAYLOAD.replace('${BADGE}', '"badge":%s,' % num)

    def set_sound(self, sound):
        if sound is None:
            self.PAYLOAD = self.PAYLOAD.replace('${SOUND}', '')
        else:
            self.PAYLOAD = self.PAYLOAD.replace('${SOUND}', '"sound":"%s",' % sound)

    def toString(self):
        # Drop any unused placeholders, then the trailing comma, which
        # would otherwise make the JSON invalid
        s = self.PAYLOAD.replace('${MESSAGE}', '').replace('${BADGE}', '').replace('${SOUND}', '')
        return s.replace(',}', '}')

def connectAPNS(host, cert):
    ssl_sock = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM), certfile=cert)
    ssl_sock.connect(APNS_HOST)
    return ssl_sock

def sendNotification(sslSock, device, message, badge, sound):
    payload = Payload()
    payload.set_message(message)
    payload.set_badge(badge)
    payload.set_sound(sound)
    payloadAsStr = payload.toString()
    # Simple binary notification format: command, token length, token,
    # payload length, payload
    fmt = '!BH32sH%ds' % len(payloadAsStr)
    binaryDeviceToken = device.replace(' ', '').decode('hex')
    binaryNotification = struct.pack(fmt, 0, 32, binaryDeviceToken, len(payloadAsStr), payloadAsStr)
    print("sending payload: [" + payloadAsStr + "] as binary to device: [" + device + "]")
    sslSock.write(binaryNotification)

def printUsageAndExit():
    print("msg2ios - Version 0.1\nmsg2IOS.py -d <device> -m <message> -s[plays sound] -b <badgeint> -c <certBundlePath>")
    exit(1)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--device')
    parser.add_argument('-m', '--message')
    parser.add_argument('-s', '--sound')
    parser.add_argument('-b', '--badge')
    parser.add_argument('-c', '--cert')
    args = parser.parse_args()
    if (args.device is None) or ((args.message is None) and (args.sound is None) and (args.badge is None)) or (args.cert is None):
        printUsageAndExit()
    sslSock = connectAPNS(APNS_HOST, args.cert)
    sendNotification(sslSock, args.device, args.message, args.badge, args.sound)
    sslSock.close()

web automation - auto check link

I'm new to web apps, and I want to check when there's a new version of the DotA map by checking the links on getdota.com.
How can I do this, and in which language? I want it to check every time Warcraft starts, and to auto-download the new map to a specific folder.
My question is: can you give a link to a specific article about web automation or something like that?
Thanks first :)
Below is an example in Python.
It parses the getdota.com page, reads the parameters for the POST request that downloads a map, fetches the file, and saves it in the configured directory (by default the current directory).
#!/usr/bin/env python
import urllib
import urllib2
import sgmllib
from pprint import pprint
import os.path
import sys

url = 'http://www.getdota.com/'
download_url = 'http://www.getdota.com/app/getmap/'
chunk = 10000
directory = ''  # directory where the file should be saved; if empty, uses the current dir

class DotaParser(sgmllib.SGMLParser):
    def parse(self, s):
        self.feed(s)
        self.close()

    def __init__(self, verbose=0):
        sgmllib.SGMLParser.__init__(self, verbose)
        self.URL = ''
        self.post_args = {}

    def getArgs(self):
        return self.post_args

    def start_input(self, attributes):
        # Collect the hidden form fields needed for the download POST
        d = dict(attributes)
        if d.get('id', None) is None:
            return
        if d['id'] in ["input_mirror2", "input_file_name2", "input_map_id2", "input_language2", "input_language_id2"]:
            self.post_args[d['name']] = d['value']

if __name__ == '__main__':
    dotap = DotaParser()
    data = urllib2.urlopen(urllib2.Request(url)).read()
    dotap.parse(data)
    data = urllib.urlencode(dotap.getArgs())
    request = urllib2.Request(download_url, data)
    response = urllib2.urlopen(request)
    page = response.read()
    # download file
    fname = directory + page.split('/')[-1]
    if os.path.isfile(fname):
        print "No newer file available"
        sys.exit(0)
    f = open(fname, 'w')
    print "New file available. Saving in: %s" % fname
    webFile = urllib.urlopen(page)
    c = webFile.read(chunk)
    while c:
        f.write(c)
        c = webFile.read(chunk)
    f.close()
    webFile.close()
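To cover the "check every time you start Warcraft" part of the question, one simple option is a small wrapper that runs the downloader and then launches the game. A minimal sketch, with both paths as assumptions to adjust for your install:

import subprocess

UPDATER = "getdota_update.py"                     # hypothetical name for the script above
WAR3 = r"C:\Program Files\Warcraft III\war3.exe"  # hypothetical install path

subprocess.call(["python", UPDATER])  # fetch the newest map first
subprocess.call([WAR3])               # then start the game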