add_remove_proxmox_accounts.py
#! /usr/bin/env python3
"""
Automate adding and deleting Proxmox users based on the content of
an external database, in this case a JSON file.
"""
import sys, os, json, requests, subprocess

# job titles that do not require an account.
titignore = ["Program Assistant", "Data Coordinator", "Project Coordinator",
             "Administrative Assistant", "Administrative Coordinator",
             "Office Worker", "Data Operations Manager",
             "Clinical Research Coordinator", "Administrative Manager",
             "Data Entry Operator", "Clinical Research Nurse",
             "Veterinary Technician", "Animal Equipment Preparer",
             "Program Administrator", "Senior Project Manager",
             "Coordinating Center Manager", "Animal Technician",
             "Member Emeritus", "Nurse Manager", "Financial Analyst",
             "Yoga Teacher", ""]

# the user database of potential scientific computing users
j = requests.get('https://toolbox.fhcrc.org/json/sc_users.json').json()
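
# A sketch of the record layout this script assumes, inferred from the fields
# accessed below (uid, mail, title, pi_dept, department); the real feed may
# carry additional columns and the values shown here are purely illustrative:
#
#   [{"uid": "jdoe", "mail": "jdoe@...", "title": "Staff Scientist",
#     "pi_dept": "doe_j", "department": "Basic Sciences"}, ...]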

def main():
    # adding groups ##########################################
    groups = uniq(jget(j, 'pi_dept'))
    groups_add, groups_remove = listcompare('/var/tmp/groups_last.json', groups)
    print("\nAdding %s groups/pools...:" % len(groups_add), groups_add)
    if len(groups_add) <= 100:
        for g in groups_add:
            d = jsearchone(j, 'pi_dept', g, 'department')
            s = ''
            s = s + 'pvesh create /pools -poolid %s -comment "%s"\n' % (g, d.strip())
            s = s + 'pveum groupadd %s -comment "%s"\n' % (g, d.strip())
            s = s + 'pveum aclmod /pool/%s/ -group %s -role PVEAdmin\n' % (g, g)
            s = s + 'pveum aclmod /storage/proxazfs/ -group %s -role PVEDatastoreUser\n' % g
            s = s + 'pveum aclmod /storage/proxnfs/ -group %s -role PVEDatastoreUser\n' % g
            print(s)
            ret = run_script(s, output=True)
            if ret > 0:
                print('******** Error : %s' % ret)
    else:
        print('Error: will not add batches of more than 100 groups')
    # save the list of currently processed groups
    with open('/var/tmp/groups_last.json', 'w') as outfile:
        json.dump(groups, outfile)
    # adding users #########################################
    uids = uniq(jget(j, 'uid'))
    uids_add, uids_del = listcompare('/var/tmp/uids_last.json', uids)
    # adding new users but never more than 1000
    # preview at most the first 100 new uids
    x = min(len(uids_add), 100)
    print("\nAdding %s users...:" % len(uids_add), uids_add[0:x])
    n = 1
    if len(uids_add) <= 1000:
        for uid in uids_add:
            print('%s: %s' % (n, uid))
            # ignore users without an email address and some job titles
            if jsearchone(j, "uid", uid, "mail") == "" or jsearchone(j, "uid", uid, "title") in titignore:
                continue
            ##### this is too long, /etc/pve/user.cfg can only be 128K
            #s = 'pveum useradd %[email protected] -email %s -firstname %s -lastname %s -groups %s -comment "%s"' % \
            #    (uids[n], mails[n], givenNames[n], sns[n], pi_depts[n], departments[n].strip())
            s = 'pveum useradd %[email protected] -groups %s' % (uid, jsearchone(j, "uid", uid, "pi_dept"))
            ret = run_script(s, output=True)
            n += 1
            if ret > 0:
                print('******** Error : %s' % ret)
    else:
        print('Error: will not add batches of more than 1000 users')
    # deleting disabled users but never more than 10
    print("\nDeleting %s users...:" % len(uids_del), uids_del)
    if len(uids_del) <= 10:
        for uid in uids_del:
            print('test: del user %s' % uid)
            # test mode: this break skips the actual deletion below
            break
            s = 'pveum userdel %[email protected] ' % uid
            ret = run_script(s, output=True)
            if ret > 0:
                print('******** Error : %s' % ret)
    else:
        print('Error: will not delete more than 10 users at a time')
    # save the list of currently processed uids
    with open('/var/tmp/uids_last.json', 'w') as outfile:
        json.dump(uids, outfile)

########################################################################
# some helper functions

def listcompare(oldjsonfile, newlist):
    """ compares a list with a previously saved list and returns
        a list of newly added items and a list of removed items.
    """
    addedlist, removedlist = newlist, []
    if os.path.exists(oldjsonfile):
        with open(oldjsonfile, 'r') as f:
            oldlist = json.load(f)
        addedlist = [item for item in newlist if item not in oldlist]
        removedlist = [item for item in oldlist if item not in newlist]
    return addedlist, removedlist

def jsearch(json, sfld, search, rfld):
    """ return a list of values from a column based on a search """
    lst = []
    for j in json:
        if j[sfld] == search or search == '*':
            lst.append(j[rfld].strip())
    return lst

def jsearchone(json, sfld, search, rfld):
    """ return the first search result of a column based search """
    for j in json:
        if j[sfld] == search:
            return j[rfld].strip()

def jget(json, rfld):
    """ return all values in one column """
    lst = []
    for j in json:
        if j[rfld].strip() != "":
            lst.append(j[rfld].strip())
    return lst

def uniq(seq):
    """ remove duplicates from a list """
    # Not order preserving
    keys = {}
    for e in seq:
        keys[e] = 1
    return list(keys.keys())
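
# Hypothetical helper usage (values are illustrative, not from the real feed):
# jget(j, 'uid') collects every non-empty uid, uniq() removes duplicates, and
# jsearchone(j, 'uid', 'jdoe', 'pi_dept') returns the stripped pi_dept of the
# first matching record, or None when no record matches.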

class ScriptException(Exception):
    def __init__(self, returncode, stdout, stderr, script):
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr
        Exception.__init__(self, 'Error in script')

def run_script(script, output=True, stdin=None):
    """Runs each line of the script; returns the last line's captured stdout
       (or its shell return code when output=True). Raises ScriptException on
       a non-zero return code when output=False."""
    # Note: by using a list here (['bash', ...]) you avoid quoting issues, as the
    # arguments are passed in exactly this order (spaces, quotes, and newlines won't
    # cause problems):
    stdout = ""
    for line in script.split('\n'):
        if output:
            try:
                if line:
                    print("************* Executing command: %s" % line)
                    # subprocess.call() returns the shell return code, not stdout
                    stdout = subprocess.call(line, shell=True)
            except Exception:
                print("Error executing command: %s" % line)
                print("Error: %s" % stdout)
        else:
            proc = subprocess.Popen(['bash', '-c', line],
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                    stdin=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if proc.returncode:
                raise ScriptException(proc.returncode, stdout, stderr, script)
    return stdout
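
# A hedged usage sketch: with output=True, run_script('pveum groupadd g_lab\npveum ...')
# prints and runs each line through the shell and returns the return code of the
# last command (checked as "ret > 0" in main()); with output=False it runs each
# line via bash and returns the captured stdout, raising ScriptException on
# failure. The group name g_lab is purely illustrative.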

def send_mail(to, subject, text, attachments=[], cc=[], bcc=[], smtphost="", fromaddr=""):
    """ sends email, perhaps with attachment """
    if sys.version_info[0] == 2:
        from email.MIMEMultipart import MIMEMultipart
        from email.MIMEBase import MIMEBase
        from email.MIMEText import MIMEText
        from email.Utils import COMMASPACE, formatdate
        from email import Encoders
    else:
        from email.mime.multipart import MIMEMultipart
        from email.mime.base import MIMEBase
        from email.mime.text import MIMEText
        from email.utils import COMMASPACE, formatdate
        from email import encoders as Encoders
    from string import Template
    import socket
    import smtplib

    if not isinstance(to, list):
        print("the 'to' parameter needs to be a list")
        return False
    if len(to) == 0:
        print("no 'to' email addresses")
        return False
    myhost = socket.getfqdn()
    if smtphost == '':
        # get_mx_from_email_or_fqdn() is not defined in this file and is
        # expected to be provided elsewhere
        smtphost = get_mx_from_email_or_fqdn(myhost)
    if not smtphost:
        sys.stderr.write('could not determine smtp mail host !\n')
    if fromaddr == '':
        fromaddr = os.path.basename(__file__) + '-no-reply@' + \
            '.'.join(myhost.split(".")[-2:])  # extract domain from host
    tc = 0
    for t in to:
        if '@' not in t:
            # if no email domain given use domain from local host
            to[tc] = t + '@' + '.'.join(myhost.split(".")[-2:])
        tc += 1
    message = MIMEMultipart()
    message['From'] = fromaddr
    message['To'] = COMMASPACE.join(to)
    message['Date'] = formatdate(localtime=True)
    message['Subject'] = subject
    message['Cc'] = COMMASPACE.join(cc)
    message['Bcc'] = COMMASPACE.join(bcc)
    body = Template('This is a notification message from $application, running on \n' + \
                    'host $host. Please review the following message:\n\n' + \
                    '$notify_text\n\n'
                    )
    host_name = socket.gethostname()
    full_body = body.substitute(host=host_name.upper(), notify_text=text, application=os.path.basename(__file__))
    message.attach(MIMEText(full_body))
    for f in attachments:
        part = MIMEBase('application', 'octet-stream')
        part.set_payload(open(f, 'rb').read())
        Encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
        message.attach(part)
    addresses = []
    for x in to:
        addresses.append(x)
    for x in cc:
        addresses.append(x)
    for x in bcc:
        addresses.append(x)
    smtp = smtplib.SMTP(smtphost)
    smtp.sendmail(fromaddr, addresses, message.as_string())
    smtp.close()
    return True
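
# send_mail() is not called from main() in this script; a hypothetical invocation
# (the recipient and host below are placeholders) would look like:
#   send_mail(['admin'], 'proxmox account sync', 'added 3 users', smtphost='smtp.internal')
# A bare username is expanded with the local host's domain, and an empty smtphost
# falls back to get_mx_from_email_or_fqdn().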

def parse_arguments():
    """
    Gather command-line arguments.
    """
    pass
    #parser = argparse.ArgumentParser(prog='prox ',
    #    description='a tool for deploying resources from proxmox ' + \
    #                '(LXC containers or VMs)')
    #parser.add_argument('command', type=str, default='deploy', nargs='?',
    #    help="a command to be executed. (deploy, start, stop)")
    #parser.add_argument('--hosts', '-n', dest='hosts', action='store', default=[], nargs='*',
    #    help='hostnames of your new VM/containers')
    #parser.add_argument('--image', '-i', dest='image', action='store', default='',
    #    help='image we use to clone')
    #parser.add_argument('--debug', '-d', dest='debug', action='store_true', default=False,
    #    help="do not send an email but print the result to console")
    #parser.add_argument('--mailto', '-m', dest='mailto', action='store', default='',
    #    help='send email address to notify of a new deployment.')
    #return parser.parse_args()
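
# Note: re-enabling the commented-out parser above would also require
# "import argparse" at the top of the file; as written, parse_arguments()
# returns None and main() does not use the result.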

if __name__ == "__main__":
    args = parse_arguments()
    try:
        main()
    except KeyboardInterrupt:
        print('Exit !')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)