raas / BHEAppE / Commits / 84ce3fce

Commit 84ce3fce authored Dec 1, 2020 by strakpe
update

parent fe185c03
Showing 2 changed files with 30 additions and 1237 deletions:

  bheappe/raas_render.py       +30 −1237
  img/addon_authorization.png  +0 −0
bheappe/raas_render.py  +30 −1237  (view file @ 84ce3fce)
@@ -31,7 +31,6 @@ import typing
################################
import time
#time_file_transfer_req = time.time()
################################
import bpy
@@ -51,13 +50,6 @@ log = logging.getLogger(__name__)
################################
# def is_image_type(render_output_type: str) -> bool:
# """Determines whether the render output type is an image (True) or video (False)."""
# # This list is taken from rna_scene.c:273, rna_enum_image_type_items.
# video_types = {'AVI_JPEG', 'AVI_RAW', 'FRAMESERVER', 'FFMPEG', 'QUICKTIME'}
# return render_output_type not in video_types
def redraw(self, context):
    if context.area is None:
        return
@@ -142,38 +134,6 @@ class RAAS_PT_simplify(RaasButtonsPanel, Panel):
        # Show current status of Raas.
        raas_status = context.window_manager.raas_status
# status_row = box.split(**raas_pref.factor(0.25), align=True)
# status_row.label(text='Status:')
# status_row.label(text=raas_status)
# outdir_label_row = outdir_row.row(align=True)
# if raas_status in {'IDLE', 'ABORTED', 'DONE'}:
# # if prefs.raas_show_quit_after_submit_button:
# # ui = layout.split(**raas_pref.factor(0.75), align=True)
# # else:
# ui = box
# # ui.operator(RAAS_OT_prepare_files.bl_idname,
# # text='Prepare files',
# # icon='RENDER_ANIMATION')
# #.quit_after_submit = False
# # if prefs.raas_show_quit_after_submit_button:
# # ui.operator(RAAS_OT_prepare_files.bl_idname,
# # text='Submit & Quit',
# # icon='RENDER_ANIMATION').quit_after_submit = True
# # if bpy.app.debug:
# # layout.operator(RAAS_OT_copy_files.bl_idname)
# if raas_status == 'INVESTIGATING':
# row = box.row(align=True)
# row.label(text='Investigating your files')
# row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
# elif raas_status == 'COMMUNICATING':
# box.label(text='Communicating with Raas Server')
# elif raas_status == 'ABORTING':
# row = box.row(align=True)
# row.label(text='Aborting, please wait.')
# row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
        row = box.row(align=True)
        if raas_status in {'IDLE', 'ERROR', 'DONE'}:
            row.enabled = False
@@ -183,11 +143,7 @@ class RAAS_PT_simplify(RaasButtonsPanel, Panel):
        row.prop(context.window_manager, 'raas_progress',
                 text=context.window_manager.raas_status_txt)
        row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
#elif raas_status != 'IDLE' and context.window_manager.raas_status_txt:
# row = box.row(align=True)
# row.enabled = False
# row.label(text='Status Message:')
# row.label(text=context.window_manager.raas_status_txt)
class AuthenticatedRaasOperatorMixin:
    """Checks credentials, to be used at the start of async_execute().
@@ -238,18 +194,6 @@ JobPriorityExt_items = [
    ("CRITICAL", "Critical", "", 8),
]
# JobPriorityExt_items_dict = {
# "CONFIGURING": 0,
# "VERYLOW": 1,
# "LOW": 2,
# "BELOWAVERAGE": 3,
# "AVERAGE": 4,
# "ABOVEAVERAGE": 5,
# "HIGH": 6,
# "VERYHIGH": 7,
# "CRITICAL": 8,
# }
TaskStateExt_items = [
    ("CONFIGURING", "Configuring", "", 1),
    ("SUBMITTED", "Submitted", "", 2),
@@ -260,16 +204,6 @@ TaskStateExt_items = [
    ("CANCELED", "Canceled", "", 64),
]
# RenderFormat_items = [
# ("PNG", "PNG", ""),
# ("TGA", "TGA", ""),
# ("JPEG", "JPEG", ""),
# ("BMP", "BMP", ""),
# ("EXR", "EXR", ""),
# ("TIFF", "TIFF", ""),
# ("OPEN_EXR_MULTILAYER", "OpenEXR Multilayer", ""),
# ]
RenderType_items = [
    ("IMAGE", "Image", ""),
    ("ANIMATION", "Animation", ""),
@@ -286,35 +220,19 @@ JobQueue_items = [
####################################ListJobsForCurrentUser####################
class RAAS_PG_BlenderJobInfo(PropertyGroup):
#job_id : bpy.props.IntProperty(name="JobId")
    job_name : bpy.props.StringProperty(name="JobName")
    job_email : bpy.props.StringProperty(name="Email")
#job_priority : bpy.props.EnumProperty(items=JobPriorityExt_items,name="JobPriority",default='AVERAGE')
    job_project : bpy.props.StringProperty(name="Project Name", maxlen=25)
    job_walltime : bpy.props.IntProperty(name="Walltime [minutes]", default=30, min=1, max=2880)
    job_walltime_pre : bpy.props.IntProperty(name="Walltime Preprocessing [minutes]", default=10, min=1, max=2880)
    job_walltime_post : bpy.props.IntProperty(name="Walltime Postprocessing [minutes]", default=10, min=1, max=2880)
    job_queue : bpy.props.EnumProperty(items=JobQueue_items, name="Queue")
    job_nodes : bpy.props.IntProperty(name="Nodes", default=1, min=1, max=4)
# "WalltimeLimit": 600,
# "ClusterNodeTypeId": 7,
# job_walltimeLimit : bpy.props.EnumProperty(items=RenderFormat_items,name="Walltime Limit")
# job_clusterNodeTypeId : bpy.props.EnumProperty(items=RenderFormat_items,name="Cluster Node Type")
    frame_start : bpy.props.IntProperty(name="FrameStart")
    frame_end : bpy.props.IntProperty(name="FrameEnd")
#frame_step : bpy.props.IntProperty(name="FrameStep")
    frame_current : bpy.props.IntProperty(name="FrameCurrent")
#render_format : bpy.props.EnumProperty(items=RenderFormat_items,name="Format")
#raas_remote_blend_file : bpy.props.StringProperty(name="RenderInput")
#raas_remote_outdir : bpy.props.StringProperty(name="RenderOutput")
    render_type : bpy.props.EnumProperty(items=RenderType_items, name="Type")
#raas_local_blend_dir : bpy.props.StringProperty(name="BatOutDir")
#raas_local_blend_file : bpy.props.StringProperty(name="BatOutFile")
#bat_missing_sources : bpy.props.StringProperty(name="BatMissingSources")
class RAAS_PG_SubmittedTaskInfoExt(PropertyGroup):
    Id : bpy.props.IntProperty(name="Id")
@@ -338,8 +256,6 @@ class RAAS_PG_SubmittedJobInfoExt(PropertyGroup):
    stateRen : bpy.props.StringProperty(name="State Ren")
    statePost : bpy.props.StringProperty(name="State Post")
#, org.apache.etch.EtchServiceTier.types.EtchServiceTier.ClusterNodeTypeExt nodeType
#, org.apache.etch.EtchServiceTier.types.EtchServiceTier.SubmittedTaskInfoExt[] tasks
class RAAS_UL_SubmittedJobInfoExt(bpy.types.UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
@@ -362,11 +278,9 @@ async def _ssh(key_file, destination, port1, port2):
        destination,
        '&',
    ]
# '-q', '-o', 'StrictHostKeyChecking=no',
    import asyncio
    loop = asyncio.get_event_loop()
#, stdin=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    process = await asyncio.create_subprocess_exec(*cmd, loop=loop)
    await process.wait()
@@ -381,7 +295,6 @@ async def connect_to_client(context, fileTransfer, job_id: int, token: str) -> N
        "SessionCode": token
    }
#allocated_nodes_ips = await raas_server.post("JobManagement/GetAllocatedNodesIPs", data)
    info_job = await raas_server.post("JobManagement/GetCurrentInfoForJob", data)
    all_params = info_job['AllParameters']
    allocated_nodes_ips = ''
@@ -413,13 +326,6 @@ async def connect_to_client(context, fileTransfer, job_id: int, token: str) -> N
####################################FileTransfer#############################
#############################################################################
# Define progress callback that prints the current percentage completed for the file
# def scp_progress(filename, size, sent):
# wm = bpy.context.window_manager
# wm.raas_status_txt = "Transfer of %s" % (filename)
# wm.raas_progress = round(float(sent)/float(size)*100)
#sys.stdout.write("%s\'s progress: %.2f%% \r" % (filename, float(sent)/float(size)*100) )
async def _scp(key_file, source, destination):
    """Execute an scp command"""
    cmd = [
@@ -446,12 +352,6 @@ async def _scp(key_file, source, destination):
async def start_transfer_files(context, job_id: int, token: str) -> None:
    """Start Transfer files."""
# if 'time_file_transfer_req' in locals():
# if time.time() - time_file_transfer_req < 10:
# raise Exception("Please wait at least 10 seconds before the requesting of the files from the server!")
# time_file_transfer_req = time.time()
    data = {
        "SubmittedJobInfoId": job_id,
        "SessionCode": token
@@ -475,7 +375,6 @@ async def end_transfer_files(context, fileTransfer, job_id: int, token: str) ->
    """End Transfer files."""
    key_file = str(get_ssh_key_file())
#os.remove(key_file)
    data = {
        "SubmittedJobInfoId": job_id,
@@ -518,163 +417,6 @@ async def transfer_files_from_cluster(context, fileTransfer, job_remote_dir: str
    await transfer_files(context, fileTransfer, job_local_dir, job_remote_dir, job_id, token, False)
# async def transfer_files_from_cluster(context, job_remote_dir: str, job_local_dir: str, job_id: int, token: str) -> None:
# """Transfer files."""
# data = {
# "SubmittedJobInfoId": job_id,
# "SessionCode": token
# }
# fileTransfer = await raas_server.post("FileTransfer/GetFileTransferMethod", data)
# serverHostname = fileTransfer['serverHostname']
# sharedBasepath = fileTransfer['sharedBasepath']
# #protocol = fileTransfer['protocol']
# credentials = fileTransfer['credentials']
# import paramiko
# from io import StringIO
# from base64 import b64decode
# from scp import SCPClient
# ssh = None
# scp = None
# try:
# ssh = paramiko.SSHClient()
# ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# key = paramiko.RSAKey.from_private_key(StringIO(credentials['privateKey']))
# ssh.connect(serverHostname, username=credentials['username'], pkey=key)
# context.window_manager.raas_status = 'TRANSFERRING'
# scp = SCPClient(ssh.get_transport(), progress = scp_progress, socket_timeout=60.0)
# remote_path = '%s/%s' % (str(sharedBasepath), str(job_remote_dir))
# scp.get(remote_path, local_path=job_local_dir, recursive=True)
# # remote_path = '%s/stdout' % (str(sharedBasepath))
# # scp.get(remote_path, local_path=job_local_dir)
# # remote_path = '%s/stderr' % (str(sharedBasepath))
# # scp.get(remote_path, local_path=job_local_dir)
# context.window_manager.raas_status_txt = "Transfer of %s is done" % job_local_dir
# context.window_manager.raas_status = 'PARTIAL_DONE'
# scp.close()
# except Exception as e:
# print('An error occurred creating SCP client: %s: %s' % (e.__class__, e))
# if scp is not None:
# scp.close()
# if ssh is not None:
# ssh.close()
# #context.window_manager.raas_status = 'ABORTED'
# #pass
# data = {
# "SubmittedJobInfoId": job_id,
# "UsedTransferMethod": fileTransfer,
# "SessionCode": token
# }
# import json
# data_json = json.dumps(data)
# await raas_server.post_json("FileTransfer/EndFileTransfer", data_json)
# @functools.lru_cache(1)
# def _render_output_path(
# local_project_path: str,
# blend_filepath: Path,
# #raas_render_job_type: str,
# raas_job_output_strip_components: int,
# raas_job_output_path: str,
# render_image_format: str,
# #raas_render_frame_range: str,
# *,
# include_rel_path: bool = True) -> typing.Optional[PurePath]:
# """Cached version of render_output_path()
# This ensures that redraws of the Raas Render and Add-on preferences panels
# is fast.
# """
# try:
# project_path = Path(bpy.path.abspath(local_project_path)).resolve()
# except FileNotFoundError:
# # Path.resolve() will raise a FileNotFoundError if the project path doesn't exist.
# return None
# try:
# blend_abspath = blend_filepath.resolve().absolute()
# except FileNotFoundError:
# # Path.resolve() will raise a FileNotFoundError if the path doesn't exist.
# return None
# try:
# proj_rel = blend_abspath.parent.relative_to(project_path)
# except ValueError:
# return None
# output_top = PurePath(raas_job_output_path)
# # Strip off '.raas' too; we use 'xxx.raas.blend' as job file, but
# # don't want to have all the output paths ending in '.raas'.
# stem = blend_filepath.stem
# # if stem.endswith('.raas'):
# # stem = stem[:-5]
# # if raas_render_job_type == 'blender-video-chunks':
# # return output_top / ('YYYY_MM_DD_SEQ-%s.mkv' % stem)
# if include_rel_path:
# rel_parts = proj_rel.parts[raas_job_output_strip_components:]
# dir_components = output_top.joinpath(*rel_parts) / stem
# else:
# dir_components = output_top
# # Blender will have to append the file extensions by itself.
# # if is_image_type(render_image_format):
# # return dir_components / '######'
# return dir_components #/ raas_render_frame_range
# def render_output_path(context, filepath: Path = None) -> typing.Optional[PurePath]:
# """Returns the render output path to be sent to Raas.
# :param context: the Blender context (used to find Raas preferences etc.)
# :param filepath: the Path of the blend file to render, or None for the current file.
# Returns None when the current blend file is outside the project path.
# """
# scene = context.scene
# prefs = raas_pref.preferences()
# if filepath is None:
# filepath = Path(context.blend_data.filepath)
# job_output_path = prefs.raas_job_output_path
# return _render_output_path(
# prefs.raas_project_local_path,
# filepath,
# #scene.raas_render_job_type,
# prefs.raas_job_output_strip_components,
# job_output_path,
# scene.render.image_settings.file_format,
# #scene.raas_render_frame_range,
# #include_rel_path=not scene.raas_do_override_output_path,
# )
# def remove_storage_path(storage_path, bat_path) -> str:
# storage_path_pure = pathlib.PurePath(storage_path)
# bat_path_pure = pathlib.PurePath(bat_path)
# relpath = bat_path_pure.relative_to(storage_path_pure)
# return relpath.as_posix()
##################################################################################
class RAAS_OT_download_files(
@@ -693,8 +435,6 @@ class RAAS_OT_download_files(
        if not await self.authenticate(context):
            return
#blender_job_info_new = context.scene.raas_blender_job_info_new
#submitted_job_info_ext_new = context.scene.raas_submitted_job_info_ext_new
        idx = context.scene.raas_list_jobs_index
        if idx != -1:
@@ -702,43 +442,23 @@ class RAAS_OT_download_files(
            item = context.scene.raas_list_jobs[idx]
            remote_storage_out = convert_path_to_linux(get_job_remote_storage(item.Name))
            #local_storage_out = get_job_local_storage_out(item.Name)
            local_storage_out = get_job_local_storage(item.Name)
            fileTransfer = await start_transfer_files(context, item.Id, self.token)
#await transfer_files_from_cluster(context, fileTransfer, remote_storage_out, str(local_storage_out), item.Id, self.token)
            remote_storage_out = convert_path_to_linux(get_job_remote_storage_out(item.Name))
#local_storage_out = get_job_local_storage_out(item.Name)
            local_storage_out = get_job_local_storage(item.Name)
            await transfer_files_from_cluster(context, fileTransfer, remote_storage_out, str(local_storage_out), item.Id, self.token)
            remote_storage_log = convert_path_to_linux(get_job_remote_storage_log(item.Name))
            #local_storage_log = get_job_local_storage_log(item.Name)
            local_storage_log = get_job_local_storage(item.Name)
            await transfer_files_from_cluster(context, fileTransfer, remote_storage_log, str(local_storage_log), item.Id, self.token)
# stdout_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / 'stdout')
# stderr_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / 'stderr')
# stdprog_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / 'stdprog')
# stdlog_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / 'stdlog')
#local_storage_log = get_job_local_storage_log(item.Name)
#local_storage_log.mkdir(parents=True,exist_ok=True)
#await transfer_files_from_cluster(context, 'stdout', str(local_storage_log / 'stdout'), item.Id, self.token)
#await transfer_files_from_cluster(context, 'stderr', str(local_storage_log / 'stderr'), item.Id, self.token)
#await transfer_files_from_cluster(context, 'stdprog', str(local_storage_log / 'stdprog'), item.Id, self.token)
#await transfer_files_from_cluster(context, 'stdlog', str(local_storage_log / 'stdlog'), item.Id, self.token)
            await end_transfer_files(context, fileTransfer, item.Id, self.token)
        except Exception: # as e:
            #print('Problem with downloading files:')
            #print(e)
            import traceback
            traceback.print_exc()
@@ -770,9 +490,7 @@ class RAAS_OT_connect_to_client(
            await connect_to_client(context, fileTransfer, item.Id, self.token)
        except Exception: # as e:
            #print('Problem with downloading files:')
            #print(e)
            import traceback
            traceback.print_exc()
@@ -786,10 +504,8 @@ class RAAS_OT_prepare_files(
    bl_idname = 'raas.prepare_files'
    bl_label = 'Prepare Files'
#stop_upon_exception = True
    log = logging.getLogger('%s.RAAS_OT_prepare_files' % __name__)
# quit_after_submit = BoolProperty()
    async def async_execute(self, context):
        # Refuse to start if the file hasn't been saved. It's okay if
@@ -812,42 +528,17 @@ class RAAS_OT_prepare_files(
        prefs = raas_pref.preferences()
# Determine where the render output will be stored.
# render_output = render_output_path(context, filepath)
# if render_output is None:
# self.report({'ERROR'}, 'Current file is outside of project path.')
# self.quit()
# return
# self.log.info('Will output render files to %s', render_output)
        if context.scene.raas_blender_job_info_new.job_project is None or len(context.scene.raas_blender_job_info_new.job_project) == 0:
            context.scene.raas_blender_job_info_new.job_project = filepath_stem
        # BAT-pack the files to the destination directory.
#job_id = '%d' % context.scene.raas_list_jobs[context.scene.raas_list_jobs_index].id
        outdir, outfile, missing_sources, unique_dir, filepath_stem = await self.bat_pack(filepath, context.scene.raas_blender_job_info_new.job_project)
#context.scene.raas_blender_job_info_new.raas_local_blend_dir = '%s' % outdir
#context.scene.raas_blender_job_info_new.raas_local_blend_file = '%s' % outfile
#context.scene.raas_blender_job_info_new.bat_missing_sources = '%s' % missing_sources
#context.scene.raas_blender_job_info_new.frame_step = context.scene.frame_step
        context.scene.raas_blender_job_info_new.frame_start = context.scene.frame_start
        context.scene.raas_blender_job_info_new.frame_end = context.scene.frame_end
        context.scene.raas_blender_job_info_new.frame_current = context.scene.frame_current
# if not outfile:
# return
#context.scene.raas_blender_job_info_new.raas_remote_blend_file = remove_storage_path(prefs.raas_job_storage_path, outfile)
# if context.scene.raas_do_override_output_path:
# ro = context.scene.raas_override_output_path
# else:
#ro = render_output
#context.scene.raas_blender_job_info_new.raas_remote_outdir = remove_storage_path(prefs.raas_job_output_path, ro)
        context.scene.raas_blender_job_info_new.job_name = unique_dir
@@ -860,8 +551,6 @@ class RAAS_OT_prepare_files(
            self.report({'WARNING'}, 'Raas job created with missing files: %s' %
                        '; '.join(names))
# else:
# self.report({'INFO'}, 'Raas job created.')
        await CreateJob(context, self.token)
@@ -882,12 +571,6 @@ class RAAS_OT_prepare_files(
        self.quit()
# def quit(self):
# if bpy.context.window_manager.raas_status != 'ABORTED':
# bpy.context.window_manager.raas_status = 'DONE'
# super().quit()
    async def _save_blendfile(self, context):
        """Save to a different file, specifically for Raas.
@@ -961,46 +644,13 @@ class RAAS_OT_prepare_files(
        prefs = raas_pref.preferences()
#proj_abspath = bpy.path.abspath(prefs.raas_project_local_path)
        proj_abspath = bpy.path.abspath('//./')
        projdir = Path(proj_abspath).resolve()
        exclusion_filter = '' #(prefs.raas_exclude_filter or '').strip()
        relative_only = False #prefs.raas_relative_only
        self.log.debug('projdir: %s', projdir)
# if any(prefs.raas_job_storage_path.startswith(scheme) for scheme in SHAMAN_URL_SCHEMES):
# endpoint, _ = bat_interface.parse_shaman_endpoint(prefs.raas_job_storage_path)
# self.log.info('Sending BAT pack to Shaman at %s', endpoint)
# try:
# outfile, missing_sources = await bat_interface.copy(
# bpy.context, filepath, projdir, '/', exclusion_filter,
# packer_class=bat_interface.ShamanPacker,
# relative_only=relative_only,
# endpoint=endpoint,
# checkout_id=job_id,
# manager_id=prefs.raas_manager.manager,
# )
# except bat_interface.FileTransferError as ex:
# self.log.error('Could not transfer %d files, starting with %s',
# len(ex.files_remaining), ex.files_remaining[0])
# self.report({'ERROR'}, 'Unable to transfer %d files' % len(ex.files_remaining))
# self.quit()
# return None, None, []
# except bat_interface.Aborted:
# self.log.warning('BAT Pack was aborted')
# self.report({'WARNING'}, 'Aborted Raas file packing/transferring')
# self.quit()
# return None, None, []
# bpy.context.window_manager.raas_status = 'DONE'
# outfile = PurePath('{shaman}') / outfile
# return None, outfile, missing_sources
# Create a unique directory that is still more or less identifyable.
# This should work better than a random ID.
# unique_dir = '%s-%s-%s' % (datetime.now().isoformat('-').replace(':', ''),
# filepath.stem, project)
        dt = datetime.now().isoformat('-').replace(':', '').replace('.', '')
        unique_dir = '%s-%s' % (dt[0:19], project)
@@ -1036,29 +686,6 @@ class RAAS_OT_prepare_files(
        bpy.context.window_manager.raas_status = 'PARTIAL_DONE'
        return outdir, outfile, missing_sources, unique_dir, filepath.stem
# class RAAS_OT_transfer_files(
# async_loop.AsyncModalOperatorMixin,
# AuthenticatedRaasOperatorMixin,
# Operator):
# """transfer_files"""
# bl_idname = 'raas.transfer_files'
# bl_label = 'Transfer Files'
# stop_upon_exception = True
# log = logging.getLogger('%s.RAAS_OT_transfer_files' % __name__)
# # quit_after_submit = BoolProperty()
# async def async_execute(self, context):
# if not await self.authenticate(context):
# return
# blender_job_info_new = context.scene.raas_blender_job_info_new
# submitted_job_info_ext_new = context.scene.raas_submitted_job_info_ext_new
# await transfer_files_to_cluster(context, blender_job_info_new.raas_local_blend_dir, blender_job_info_new.raas_remote_outdir, submitted_job_info_ext_new.Id, self.token)
# self.quit()
class RAAS_OT_abort(Operator):
    """Aborts a running Raas file packing/transfer operation.
@@ -1130,50 +757,19 @@ class RAAS_PT_NewJob(RaasButtonsPanel, Panel):
        else:
            layout.enabled = False
        #prefs = raas_pref.preferences()
        #################################################
        box = layout.box()
# if context.window_manager.raas_status in {'IDLE', 'DONE'}:
# box.enabled = True
# else:
# box.enabled = False
        paths_layout = box.column(align=True)
        blender_job_info_new = context.scene.raas_blender_job_info_new
# if blender_job_info_new.raas_local_blend_dir:
# outdir_row = paths_layout.split(**raas_pref.factor(0.25), align=True)
# outdir_row.label(text='Local Dir:')
# outdir_label_row = outdir_row.row(align=True)
# outdir_label_row.label(text=blender_job_info_new.raas_local_blend_dir)
# if blender_job_info_new.raas_local_blend_file:
# outfile_row = paths_layout.split(**raas_pref.factor(0.25), align=True)
# outfile_row.label(text='Local File:')
# outfile_label_row = outfile_row.row(align=True)
# outfile_label_row.label(text=blender_job_info_new.raas_local_blend_file)
# # if blender_job_info_new.bat_missing_sources:
# # missing_sources_row = paths_layout.split(**raas_pref.factor(0.25), align=True)
# # missing_sources_row.label(text='Blender Missing Sources:')
# # missing_sources_label_row = missing_sources_row.row(align=True)
# # missing_sources_label_row.label(text=blender_job_info_new.bat_missing_sources)
# if blender_job_info_new.raas_local_blend_dir is None:
# return
        blender_job_info_new = context.scene.raas_blender_job_info_new
        job_info_col = paths_layout.column()
        # job_info_col.prop(blender_job_info_new, 'job_name')
        # job_info_col.prop(blender_job_info_new, 'job_priority')
        job_info_col.prop(blender_job_info_new, 'job_queue')
#job_info_col.prop(blender_job_info_new, 'render_format')
        if blender_job_info_new.job_queue == 'MIC' or blender_job_info_new.job_queue == 'MICBUFFER' or blender_job_info_new.job_queue == 'MICRES':
            job_info_col.prop(blender_job_info_new, 'job_project')
            job_info_col.prop(blender_job_info_new, 'job_email')
@@ -1207,85 +803,18 @@ class RAAS_PT_NewJob(RaasButtonsPanel, Panel):
        col.prop(blender_job_info_new, 'job_walltime')
        if blender_job_info_new.render_type == 'IMAGE':
#frame_row = job_info_col.split(**raas_pref.factor(0.25), align=True)
#frame_row.label(text='Frame:')
#frame_current_label_row = frame_row.box() #.row(align=True)
#frame_current_label_row.label(text=('%d' % context.scene.frame_current))
#frame_start_label_row = frame_row.row(align=True)
            col = job_info_col.column(align=True)
            col.prop(context.scene, 'frame_current')
        else:
            #frame_row = job_info_col.split(**raas_pref.factor(0.25), align=True)
            #frame_row.label(text='Frames:')
            col = job_info_col.column(align=True)
# col.prop(context.scene, 'frame_start',slider=True)
# col.prop(context.scene, 'frame_end',slider=True)
# col.prop(context.scene, 'frame_step',slider=True)
            col.prop(context.scene, "frame_start", text="Frame Start")
            col.prop(context.scene, "frame_end", text="End")
#col.prop(context.scene, "frame_step", text="Step")
#frame_start_label_row = frame_row.row(align=True)
#frame_row.label(text=('%d' % context.scene.frame_start))
#frame_start_label_row.prop(blender_job_info_new, 'frame_start', text='Start')
#frame_end_label_row = frame_row.row(align=True)
#frame_row.label(text=('%d' % context.scene.frame_end))
#frame_start_label_row.prop(blender_job_info_new, 'frame_end', text='End')
# col = job_info_col.column(align=True)
# col.prop(context.scene.render, "resolution_x", text="Resolution X")
# col.prop(context.scene.render, "resolution_y", text="Resolution Y")
# col.prop(context.scene.render, "resolution_percentage", text="%")
# if context.scene.cycles.progressive == 'PATH' or context.scene.progressive == 'BRANCHED_PATH' is False:
# col = job_info_col.column(align=True)
# col.prop(context.scene.cycles, "samples", text="Samples")
# else:
# col = job_info_col.column(align=True)
# col.prop(context.scene.cycles, "aa_samples", text="Samples")
# job_info_col.prop(blender_job_info_new, 'raas_remote_blend_file', text='Remote Blend File')
# job_info_col.prop(blender_job_info_new, 'render_output', text='Remote Out Dir')
#job_info_col.operator(RAAS_OT_CreateJob.bl_idname, text='Create Job')
#submitted_job_info_ext_new = context.scene.raas_submitted_job_info_ext_new
#box.enabled = False
# if submitted_job_info_ext_new.Id != 0:
# box = layout.box()
# box.label(text=('Job: %d' % submitted_job_info_ext_new.Id))
#box.operator(RAAS_OT_transfer_files.bl_idname, text='Transfer files to remote cluster')
#box = layout.box()
#box.label(text=('Job: %d' % submitted_job_info_ext_new.Id))
#box.operator(RAAS_OT_SubmitJob.bl_idname, text='Submit job')
        box.operator(RAAS_OT_prepare_files.bl_idname,
                     text='Submit Job',
                     icon='RENDER_ANIMATION')
# def draw_odd_size_warning(self, layout, render):
# render_width = render.resolution_x * render.resolution_percentage // 100
# render_height = render.resolution_y * render.resolution_percentage // 100
# odd_width = render_width % 2
# odd_height = render_height % 2
# if not odd_width and not odd_height:
# return False
# box = layout.box()
# box.alert = True
# if odd_width and odd_height:
# msg = 'Both X (%d) and Y (%d) resolution are' % (render_width, render_height)
# elif odd_width:
# msg = 'X resolution (%d) is' % render_width
# else:
# msg = 'Y resolution (%d) is' % render_height
# box.label(text=msg + ' not divisible by 2.', icon='ERROR')
# box.label(text='Any video rendered from these frames will be padded with black pixels.')
##########################################################################
async def GetCurrentInfoForJob(context, job_id: int, token: str) -> None:
@@ -1298,14 +827,6 @@ async def GetCurrentInfoForJob(context, job_id: int, token: str) -> None:
    info_job = await raas_server.post("JobManagement/GetCurrentInfoForJob", data)
# tasks = info_job['tasks']
# for task in tasks:
# all_params = task['allParameters']
# for line in all_params.split('\n'):
# print(line)
# if "job_state" in line:
# print(task['id'], ': ', line)
# #break
    return info_job
@@ -1333,52 +854,11 @@ async def ListJobsForCurrentUser(context, token):
    resp_json = await raas_server.post("JobManagement/ListJobsForCurrentUser", data)
#context.scene.raas_list_jobs_index = -1
    context.scene.raas_list_jobs.clear()
    for key in resp_json:
        item = context.scene.raas_list_jobs.add()
        raas_server.fill_items(item, key)
# item.Id = key['id']
# item.Name = key['name']
# item.state = JobStateExt_items[key['state']][0]
# item.Priority = JobPriorityExt_items[key['Priority']][0]
# item.project = key['project']
# item.creationTime = key['creationTime']
# item.submitTime = key['submitTime']
# item.startTime = key['startTime']
# item.endTime = key['endTime']
# item.totalAllocatedTime = key['totalAllocatedTime']
# item.allParameters = key['allParameters']
# all_params = item.allParameters
# array_state_count = []
# for line in all_params.split('\n'):
# if "array_state_count" in line:
# array_state_count.append(line.replace("array_state_count: ", ""))
# if len(array_state_count) == 1:
# item.statePre = array_state_count[0]
# item.stateRen = array_state_count[0]
# item.statePost = array_state_count[0]
# if len(array_state_count) == 3:
# item.statePre = array_state_count[0]
# item.stateRen = array_state_count[1]
# item.statePost = array_state_count[2]
# if len(item.submitTime) > 19:
# item.submitTime = item.submitTime[0:19]
# if len(item.endTime) > 19:
# item.endTime = item.endTime[0:19]
# if len(item.submitTime) > 0:
# item.submitTime = item.submitTime.replace("T", " ")
# if len(item.endTime) > 0:
# item.endTime = item.endTime.replace("T", " ")
    if context.scene.raas_list_jobs_index > len(context.scene.raas_list_jobs) - 1:
        context.scene.raas_list_jobs_index = len(context.scene.raas_list_jobs) - 1
@@ -1398,9 +878,6 @@ class RAAS_OT_ListJobsForCurrentUser(
        await ListJobsForCurrentUser(context, self.token)
# item = context.scene.raas_list_jobs[context.scene.raas_list_jobs_index]
# await GetCurrentInfoForJob(context, item.Id, self.token)
        self.quit()

async def CreateJobMIC(context, token):
@@ -1410,17 +887,13 @@ async def CreateJobMIC(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
#blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
#render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
    job_nodes = blender_job_info_new.job_nodes * 24
    #jobs = []
    job = None
    if blender_job_info_new.render_type == 'IMAGE':
        job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 1)
        frame_start = blender_job_info_new.frame_current
        frame_end = blender_job_info_new.frame_current
        command_template_id_ren = 13
@@ -1432,10 +905,9 @@ async def CreateJobMIC(context, token):
        command_template_id_ren = 7
    blender_param_ren = str(filename) + '.blend'
#blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    blender_param = str(filename) + '.blend'
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
    job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
    job_walltime_ren = blender_job_info_new.job_walltime * 60
@@ -1598,156 +1070,6 @@ async def CreateJobMIC(context, token):
    item = context.scene.raas_submitted_job_info_ext_new
    raas_server.fill_items(item, resp_json)
# async def CreateJobMICRES(context, token):
# blender_job_info_new = context.scene.raas_blender_job_info_new
# filenamext = os.path.basename(context.blend_data.filepath)
# filename = os.path.splitext(filenamext)
# filename = filename[0]
# #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
# #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
# job_nodes = blender_job_info_new.job_nodes * 24
# #jobs = []
# job = None
# # if blender_job_info_new.render_type == 'IMAGE':
# # job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# # else:
# # job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# if blender_job_info_new.render_type == 'IMAGE':
# job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# frame_start = blender_job_info_new.frame_current
# frame_end = blender_job_info_new.frame_current
# else:
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# frame_start = blender_job_info_new.frame_start
# frame_end = blender_job_info_new.frame_end
# #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
# blender_param = str(filename) + '.blend'
# job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
# job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
# job_walltime_ren = blender_job_info_new.job_walltime * 60
# job_walltime_post = blender_job_info_new.job_walltime_post * 60
# name_pre = '%s_pre' % (blender_job_info_new.job_name)
# name_ren = '%s_ren' % (blender_job_info_new.job_name)
# name_pos = '%s_pos' % (blender_job_info_new.job_name)
# job = {
# "Name": blender_job_info_new.job_name,
# "MinCores": 24,
# "MaxCores": job_nodes,
# "Priority": 4,
# "Project": blender_job_info_new.job_project,
# "ClusterNodeTypeId": 6,
# "FileTransferMethodId": 2,
# "ClusterId": 2,
# "EnvironmentVariables": None,
# "WaitingLimit": 0,
# "WalltimeLimit": job_walltime_ren,
# "Tasks": [
# {
# "Name": name_pre,
# "MinCores": 24,
# "MaxCores": 24,
# "WalltimeLimit": job_walltime_pre,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 6,
# "CommandTemplateId": 12,
# "Priority": 4,
# "JobArrays": job_arrays,
# "CpuHyperThreading": True,
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# },
# {
# "Name": name_ren,
# "MinCores": job_nodes,
# "MaxCores": job_nodes,
# "WalltimeLimit": job_walltime_ren,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 10,
# "CommandTemplateId": 6,
# "Priority": 4,
# "JobArrays": job_arrays,
# "DependsOnName" : name_pre,
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# },
# {
# "Name": name_pos,
# "MinCores": 24,
# "MaxCores": 24,
# "WalltimeLimit": job_walltime_post,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 6,
# "CommandTemplateId": 13,
# "Priority": 4,
# "JobArrays": job_arrays,
# "DependsOnName" : name_ren,
# "CpuHyperThreading": True,
# "EnvironmentVariables": [
# {
# "Name": "job_project",
# "Value": blender_job_info_new.job_project
# },
# {
# "Name": "job_email",
# "Value": blender_job_info_new.job_email
# },
# {
# "Name": "frame_start",
# "Value": frame_start
# },
# {
# "Name": "frame_end",
# "Value": frame_end
# }
# ],
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# }
# ]
# }
# data = {
# "JobSpecification": job,
# "SessionCode": token
# }
# resp_json = await raas_server.post("JobManagement/CreateJob", data)
# item = context.scene.raas_submitted_job_info_ext_new
# raas_server.fill_items(item, resp_json)
async def CreateJobMICBUFFER(context, token):
@@ -1756,22 +1078,14 @@ async def CreateJobMICBUFFER(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
#blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
#render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
    job_nodes = blender_job_info_new.job_nodes * 24
    #jobs = []
    job = None
# if blender_job_info_new.render_type == 'IMAGE':
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# else:
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
    if blender_job_info_new.render_type == 'IMAGE':
        job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
        frame_start = blender_job_info_new.frame_current
        frame_end = blender_job_info_new.frame_current
    else:
@@ -1779,10 +1093,9 @@ async def CreateJobMICBUFFER(context, token):
        frame_start = blender_job_info_new.frame_start
        frame_end = blender_job_info_new.frame_end
#blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    blender_param = str(filename) + '.blend'
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
    job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
    job_walltime_ren = blender_job_info_new.job_walltime * 60
@@ -1951,23 +1264,12 @@ async def CreateJobCPU(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
#blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
#render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
#job_nodes = blender_job_info_new.job_nodes * 24
    job_nodes = 24
    #jobs = []
    job = None
# if blender_job_info_new.render_type == 'IMAGE':
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# else:
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
    if blender_job_info_new.render_type == 'IMAGE':
        job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
        frame_start = blender_job_info_new.frame_current
        frame_end = blender_job_info_new.frame_current
    else:
@@ -1975,14 +1277,10 @@ async def CreateJobCPU(context, token):
        frame_start = blender_job_info_new.frame_start
        frame_end = blender_job_info_new.frame_end
#blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    blender_param = str(filename) + '.blend'
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
#clusterNodeTypeId = 7 #"Production"
#commandTemplateId = 5 #"CPU_PROD"
    job_walltime = blender_job_info_new.job_walltime * 60
@@ -2053,112 +1351,7 @@ async def CreateJobCPU(context, token):
    item = context.scene.raas_submitted_job_info_ext_new
    raas_server.fill_items(item, resp_json)
# async def CreateJobCPURES(context, token):
# blender_job_info_new = context.scene.raas_blender_job_info_new
# filenamext = os.path.basename(context.blend_data.filepath)
# filename = os.path.splitext(filenamext)
# filename = filename[0]
# #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
# #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
# #job_nodes = blender_job_info_new.job_nodes * 24
# job_nodes = 24
# #jobs = []
# job = None
# # if blender_job_info_new.render_type == 'IMAGE':
# # job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# # else:
# # job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# if blender_job_info_new.render_type == 'IMAGE':
# job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# frame_start = blender_job_info_new.frame_current
# frame_end = blender_job_info_new.frame_current
# else:
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# frame_start = blender_job_info_new.frame_start
# frame_end = blender_job_info_new.frame_end
# #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
# blender_param = str(filename) + '.blend'
# job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
# clusterNodeTypeId = 6 #CPU_PROD
# commandTemplateId = 14
# job_walltime = blender_job_info_new.job_walltime * 60
# job = {
# "Name": blender_job_info_new.job_name,
# "MinCores": job_nodes,
# "MaxCores": job_nodes,
# "Priority": 4,
# "Project": blender_job_info_new.job_project,
# "ClusterNodeTypeId": clusterNodeTypeId,
# "FileTransferMethodId": 2,
# "ClusterId": 2,
# "EnvironmentVariables": None,
# "WaitingLimit": 0,
# "WalltimeLimit": job_walltime,
# "Tasks": [
# {
# "Name": blender_job_info_new.job_name,
# "MinCores": job_nodes,
# "MaxCores": job_nodes,
# "WalltimeLimit": job_walltime,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": clusterNodeTypeId,
# "CommandTemplateId": commandTemplateId,
# "Priority": 4,
# "JobArrays": job_arrays,
# "CpuHyperThreading": True,
# "EnvironmentVariables": [
# {
# "Name": "job_project",
# "Value": blender_job_info_new.job_project
# },
# {
# "Name": "job_email",
# "Value": blender_job_info_new.job_email
# },
# {
# "Name": "frame_start",
# "Value": frame_start
# },
# {
# "Name": "frame_end",
# "Value": frame_end
# }
# ],
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# }
# ]
# }
# data = {
# "JobSpecification": job,
# "SessionCode": token
# }
# resp_json = await raas_server.post("JobManagement/CreateJob", data)
# item = context.scene.raas_submitted_job_info_ext_new
# raas_server.fill_items(item, resp_json)
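# The collapsed hunk in CreateJobCPU above assembles a HEAppE JobSpecification and
# posts it with raas_server.post; the commented-out CreateJobCPURES variant above
# shows the shape of that payload. A condensed, self-contained sketch of the same
# request follows; the field values mirror the ones visible in this file, while ids
# such as ClusterNodeTypeId/ClusterId are installation-specific assumptions.
async def create_job_sketch(raas_server, token, job_name, project, walltime, nodes):
    job_nodes = nodes * 24          # 24 cores per node, as in the functions above
    job_walltime = walltime * 60    # same *60 conversion used above
    job = {
        "Name": job_name,
        "MinCores": job_nodes,
        "MaxCores": job_nodes,
        "Priority": 4,
        "Project": project,
        "ClusterNodeTypeId": 6,     # cluster-specific, assumed
        "FileTransferMethodId": 2,
        "ClusterId": 2,
        "EnvironmentVariables": None,
        "WaitingLimit": 0,
        "WalltimeLimit": job_walltime,
        "Tasks": [],                # per-task entries omitted in this sketch
    }
    data = {"JobSpecification": job, "SessionCode": token}
    return await raas_server.post("JobManagement/CreateJob", data)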
async def CreateJobMICINTERACTIVE(context, token):
...
@@ -2167,32 +1360,12 @@ async def CreateJobMICINTERACTIVE(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
    #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
    #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
    job_nodes = blender_job_info_new.job_nodes * 24
    #job_nodes = 24
    #jobs = []
    # job = None
    # if blender_job_info_new.render_type == 'IMAGE':
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
    # else:
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
    #job_arrays = '1-2:2'
    # "JobArrays": job_arrays,
    # #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    # blender_param = str(filename) + '.blend'
    client_param = ''
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
    #clusterNodeTypeId = 10
    #commandTemplateId = 11
    job_walltime = blender_job_info_new.job_walltime * 60
...
@@ -2269,22 +1442,13 @@ async def CreateJobMPP(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
    #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
    #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
    job_nodes = blender_job_info_new.job_nodes * 24
    #jobs = []
    job = None
    # if blender_job_info_new.render_type == 'IMAGE':
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
    # else:
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
    if blender_job_info_new.render_type == 'IMAGE':
        job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
        frame_start = blender_job_info_new.frame_current
        frame_end = blender_job_info_new.frame_current
    else:
...
@@ -2292,10 +1456,9 @@ async def CreateJobMPP(context, token):
        frame_start = blender_job_info_new.frame_start
        frame_end = blender_job_info_new.frame_end
    #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    blender_param = str(filename) + '.blend'
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
    job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
    job_walltime_ren = blender_job_info_new.job_walltime * 60
...
@@ -2458,157 +1621,7 @@ async def CreateJobMPP(context, token):
    item = context.scene.raas_submitted_job_info_ext_new
    raas_server.fill_items(item, resp_json)
# async def CreateJobMPPRES(context, token):
# blender_job_info_new = context.scene.raas_blender_job_info_new
# filenamext = os.path.basename(context.blend_data.filepath)
# filename = os.path.splitext(filenamext)
# filename = filename[0]
# #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
# #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
# job_nodes = blender_job_info_new.job_nodes * 24
# #jobs = []
# job = None
# # if blender_job_info_new.render_type == 'IMAGE':
# # job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# # else:
# # job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# if blender_job_info_new.render_type == 'IMAGE':
# job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
# frame_start = blender_job_info_new.frame_current
# frame_end = blender_job_info_new.frame_current
# else:
# job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
# frame_start = blender_job_info_new.frame_start
# frame_end = blender_job_info_new.frame_end
# #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
# blender_param = str(filename) + '.blend'
# job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
# job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
# job_walltime_ren = blender_job_info_new.job_walltime * 60
# job_walltime_post = blender_job_info_new.job_walltime_post * 60
# name_pre = '%s_pre' % (blender_job_info_new.job_name)
# name_ren = '%s_ren' % (blender_job_info_new.job_name)
# name_pos = '%s_pos' % (blender_job_info_new.job_name)
# job = {
# "Name": blender_job_info_new.job_name,
# "MinCores": 24,
# "MaxCores": job_nodes,
# "Priority": 4,
# "Project": blender_job_info_new.job_project,
# "ClusterNodeTypeId": 6,
# "FileTransferMethodId": 2,
# "ClusterId": 2,
# "EnvironmentVariables": None,
# "WaitingLimit": 0,
# "WalltimeLimit": job_walltime_ren,
# "Tasks": [
# {
# "Name": name_pre,
# "MinCores": 24,
# "MaxCores": 24,
# "WalltimeLimit": job_walltime_pre,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 6,
# "CommandTemplateId": 12,
# "Priority": 4,
# "JobArrays": job_arrays,
# "CpuHyperThreading": True,
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# },
# {
# "Name": name_ren,
# "MinCores": job_nodes,
# "MaxCores": job_nodes,
# "WalltimeLimit": job_walltime_ren,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 8,
# "CommandTemplateId": 8,
# "Priority": 4,
# "JobArrays": job_arrays,
# "DependsOnName" : name_pre,
# "CpuHyperThreading": True,
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# },
# {
# "Name": name_pos,
# "MinCores": 24,
# "MaxCores": 24,
# "WalltimeLimit": job_walltime_post,
# "StandardOutputFile": 'stdout',
# "StandardErrorFile": 'stderr',
# "ProgressFile": 'stdprog',
# "LogFile": 'stdlog',
# "ClusterNodeTypeId": 6,
# "CommandTemplateId": 13,
# "Priority": 4,
# "JobArrays": job_arrays,
# "DependsOnName" : name_ren,
# "CpuHyperThreading": True,
# "EnvironmentVariables": [
# {
# "Name": "job_project",
# "Value": blender_job_info_new.job_project
# },
# {
# "Name": "job_email",
# "Value": blender_job_info_new.job_email
# },
# {
# "Name": "frame_start",
# "Value": frame_start
# },
# {
# "Name": "frame_end",
# "Value": frame_end
# }
# ],
# "TemplateParameterValues": [
# {
# "CommandParameterIdentifier": "inputParam",
# "ParameterValue": blender_param
# }
# ]
# }
# ]
# }
# data = {
# "JobSpecification": job,
# "SessionCode": token
# }
# resp_json = await raas_server.post("JobManagement/CreateJob", data)
# item = context.scene.raas_submitted_job_info_ext_new
# raas_server.fill_items(item, resp_json)
async def CreateJobMPPBUFFER(context, token):
...
@@ -2617,22 +1630,13 @@ async def CreateJobMPPBUFFER(context, token):
    filenamext = os.path.basename(context.blend_data.filepath)
    filename = os.path.splitext(filenamext)
    filename = filename[0]
    #blend_file = convert_path_to_linux(get_job_remote_storage_in(blender_job_info_new.job_name) / filename) + '.blend'
    #render_output = convert_path_to_linux(get_job_remote_storage_out(blender_job_info_new.job_name) / '######')
    job_nodes = blender_job_info_new.job_nodes * 24
    #jobs = []
    job = None
    # if blender_job_info_new.render_type == 'IMAGE':
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
    # else:
    #     job_arrays = '%d-%d:%d' % (blender_job_info_new.frame_start, blender_job_info_new.frame_end, 1)
    if blender_job_info_new.render_type == 'IMAGE':
        job_arrays = None #'%d-%d:%d' % (blender_job_info_new.frame_current, blender_job_info_new.frame_current + 1, 2)
        frame_start = blender_job_info_new.frame_current
        frame_end = blender_job_info_new.frame_current
    else:
...
@@ -2640,10 +1644,9 @@ async def CreateJobMPPBUFFER(context, token):
        frame_start = blender_job_info_new.frame_start
        frame_end = blender_job_info_new.frame_end
    #blender_param = '../%s --render-output ../%s --render-format %s' % (blend_file, render_output, blender_job_info_new.render_format)
    blender_param = str(filename) + '.blend'
    job_name = blender_job_info_new.job_name #'%s_%06d' % (blender_job_info_new.job_name, frame)
    job_walltime_pre = blender_job_info_new.job_walltime_pre * 60
    job_walltime_ren = blender_job_info_new.job_walltime * 60
...
@@ -2814,20 +1817,14 @@ async def CreateJob(context, token):
    if blender_job_info_new.job_queue == 'CPU':
        await CreateJobCPU(context, token)
    # if blender_job_info_new.job_queue == 'CPURES':
    #     await CreateJobCPURES(context, token)
    if blender_job_info_new.job_queue == 'MPP':
        await CreateJobMPP(context, token)
    # if blender_job_info_new.job_queue == 'MPPRES':
    #     await CreateJobMPPRES(context, token)
    if blender_job_info_new.job_queue == 'MIC':
        await CreateJobMIC(context, token)
    # if blender_job_info_new.job_queue == 'MICRES':
    #     await CreateJobMICRES(context, token)
    if blender_job_info_new.job_queue == 'MICBUFFER':
        await CreateJobMICBUFFER(context, token)
...
@@ -2839,34 +1836,7 @@ async def CreateJob(context, token):
        await CreateJobMICINTERACTIVE(context, token)
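# CreateJob above dispatches on blender_job_info_new.job_queue with a chain of if
# statements. The table-driven equivalent below is a sketch only, not the add-on's
# implementation; the CreateJob* names are the coroutines defined in this module.
_QUEUE_HANDLERS = {
    'CPU': CreateJobCPU,
    'MPP': CreateJobMPP,
    'MIC': CreateJobMIC,
    'MICBUFFER': CreateJobMICBUFFER,
    'MICINTERACTIVE': CreateJobMICINTERACTIVE,
}

async def dispatch_create_job(context, token):
    # Look up the handler for the selected queue and run it, if one exists.
    handler = _QUEUE_HANDLERS.get(context.scene.raas_blender_job_info_new.job_queue)
    if handler is not None:
        await handler(context, token)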
# class RAAS_OT_CreateJob(
# async_loop.AsyncModalOperatorMixin,
# AuthenticatedRaasOperatorMixin,
# Operator):
# """creates a new job"""
# bl_idname = 'raas.create_job'
# bl_label = 'CreateJob'
# async def async_execute(self, context):
# if not await self.authenticate(context):
# return
# await CreateJob(context, self.token)
# await SubmitJob(context, self.token)
# await ListJobsForCurrentUser(context, self.token)
# self.quit()
async def SubmitJob(context, token):
    #idx = context.scene.raas_list_jobs_index
    #try:
    #item = context.scene.raas_list_jobs[idx]
    item = context.scene.raas_submitted_job_info_ext_new
    data = {
...
@@ -2875,33 +1845,11 @@ async def SubmitJob(context, token):
    }
    resp_json = await raas_server.post("JobManagement/SubmitJob", data)
#print(resp_json)
# except IndexError:
# pass
# class RAAS_OT_SubmitJob(
# async_loop.AsyncModalOperatorMixin,
# AuthenticatedRaasOperatorMixin,
# Operator):
# """submits a job to a HPC scheduler queue"""
# bl_idname = 'raas.submit_job'
# bl_label = 'SubmitJob'
# async def async_execute(self, context):
# if not await self.authenticate(context):
# return
# await SubmitJob(context, self.token)
# await ListJobsForCurrentUser(context, self.token)
# self.quit()
async def CancelJob(context, token):
    idx = context.scene.raas_list_jobs_index
    #try:
    item = context.scene.raas_list_jobs[idx]
    data = {
...
@@ -2911,8 +1859,6 @@ async def CancelJob(context, token):
    resp_json = await raas_server.post("JobManagement/CancelJob", data)
#except IndexError:
# pass
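# CancelJob indexes context.scene.raas_list_jobs with raas_list_jobs_index, which is
# registered below with a default of -1 (nothing selected); the panel guards this with
# "if idx != -1". A minimal sketch of the same lookup with an explicit bounds check
# (the helper name is hypothetical, not part of the add-on):
def selected_job(scene):
    idx = scene.raas_list_jobs_index
    if idx < 0 or idx >= len(scene.raas_list_jobs):
        # Nothing selected, or the index is stale after a list refresh.
        return None
    return scene.raas_list_jobs[idx]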
class RAAS_OT_CancelJob(
        async_loop.AsyncModalOperatorMixin,
...
@@ -2946,62 +1892,29 @@ class RAAS_PT_ListJobs(RaasButtonsPanel, Panel):
        else:
            layout.enabled = False
        #header
        box = layout.box()
        row = box.row()
        col = row.column()
        col.label(text="Id")
        # col = row.column()
        # col.label(text="Name")
        col = row.column()
        col.label(text="Project")
        col = row.column()
        col.label(text="State")
        # col = row.column()
        # box = col.box()
        #box.separator_spacer()
        # col = row.column()
        # col.separator_spacer()
        #table
        row = layout.row()
        row.template_list("RAAS_UL_SubmittedJobInfoExt", "", context.scene, "raas_list_jobs", context.scene, "raas_list_jobs_index")
        # col = row.column(align=True)
        # col.operator("raas.list_jobs_for_current_user_actions", icon='ADD', text="").action = 'ADD'
        # col.operator("raas.list_jobs_for_current_user_actions", icon='X', text="").action = 'CANCEL'
        # col.operator("raas.list_jobs_for_current_user_actions", icon='REMOVE', text="").action = 'REMOVE'
        #button
        row = layout.row()
        row.operator(RAAS_OT_ListJobsForCurrentUser.bl_idname, text='Refresh')
        #row.operator(RAAS_OT_NewJob.bl_idname, text='New')
        #row.operator(RAAS_OT_SubmitJob.bl_idname, text='Submit')
        row.operator(RAAS_OT_CancelJob.bl_idname, text='Cancel')
        idx = context.scene.raas_list_jobs_index
        # if idx == -1:
        #     item = context.scene.raas_blender_job_info_new
        #     box.enabled = True
        #     #box.label(text=('Job: %d' % item.Id))
        #     box.prop(item, "job_name")
        #     #box.prop(item, "state")
        #     box.prop(item, "job_priority")
        #     box.prop(item, "job_project")
        #     #box.prop(item, "creationTime")
        #     #box.prop(item, "submitTime")
        #     #box.prop(item, "startTime")
        #     #box.prop(item, "endTime")
        #     #box.prop(item, "totalAllocatedTime")
        #     #box.prop(item, "allParameters")
        #     box.operator(RAAS_OT_CreateJob.bl_idname, text='Create')
        # else:
        if idx != -1:
            item = context.scene.raas_list_jobs[idx]
...
@@ -3010,9 +1923,7 @@ class RAAS_PT_ListJobs(RaasButtonsPanel, Panel):
            box.label(text=('Job: %d' % item.Id))
            box.prop(item, "Name")
            #box.prop(item, "Priority")
            box.prop(item, "Project")
            #box.prop(item, "creationTime")
            box.prop(item, "SubmitTime")
            box.prop(item, "StartTime")
            box.prop(item, "EndTime")
...
@@ -3020,11 +1931,6 @@ class RAAS_PT_ListJobs(RaasButtonsPanel, Panel):
            row = box.column()
            row.prop(item, "State")
            #if len(item.endTime) == 0:
            #    row.prop(item, "statePre")
            #    row.prop(item, "stateRen")
            #    row.prop(item, "statePost")
            box = layout.box()
            local_storage = str(get_job_local_storage(item.Name))
...
@@ -3037,82 +1943,10 @@ class RAAS_PT_ListJobs(RaasButtonsPanel, Panel):
                                           text='', icon='DISK_DRIVE')
            props.path = local_storage
            # local_storage_out = str(get_job_local_storage_out(item.Name))
            # #paths_layout = box.column(align=True)
            # labeled_row = paths_layout.split(**raas_pref.factor(0.25), align=True)
            # labeled_row.label(text='Result Path:')
            # prop_btn_row = labeled_row.row(align=True)
            # prop_btn_row.label(text=local_storage_out)
            # props = prop_btn_row.operator(RAAS_OT_explore_file_path.bl_idname,
            #                               text='', icon='DISK_DRIVE')
            # props.path = local_storage_out
            row = box.row()
            row.operator(RAAS_OT_download_files.bl_idname, text='Download results')
# row = box.row()
# row.operator(RAAS_OT_connect_to_client.bl_idname, text='Connect to client')
# raas_status = context.window_manager.raas_status
# if raas_status in {'IDLE', 'ABORTED', 'DONE'}:
# # if prefs.raas_show_quit_after_submit_button:
# # ui = layout.split(**raas_pref.factor(0.75), align=True)
# # else:
# ui = box
# # ui.operator(RAAS_OT_prepare_files.bl_idname,
# # text='Prepare files',
# # icon='RENDER_ANIMATION')
# #.quit_after_submit = False
# # if prefs.raas_show_quit_after_submit_button:
# # ui.operator(RAAS_OT_prepare_files.bl_idname,
# # text='Submit & Quit',
# # icon='RENDER_ANIMATION').quit_after_submit = True
# # if bpy.app.debug:
# # layout.operator(RAAS_OT_copy_files.bl_idname)
# elif raas_status == 'INVESTIGATING':
# row = box.row(align=True)
# row.label(text='Investigating your files')
# row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
# elif raas_status == 'COMMUNICATING':
# box.label(text='Communicating with Raas Server')
# elif raas_status == 'ABORTING':
# row = box.row(align=True)
# row.label(text='Aborting, please wait.')
# row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
# if raas_status == 'TRANSFERRING':
# row = box.row(align=True)
# row.prop(context.window_manager, 'raas_progress',
# text=context.window_manager.raas_status_txt)
# row.operator(RAAS_OT_abort.bl_idname, text='', icon='CANCEL')
# elif raas_status != 'IDLE' and context.window_manager.raas_status_txt:
# box.label(text=context.window_manager.raas_status_txt)
#render_output = render_output_path(context)
# labeled_row = paths_layout.split(**raas_pref.factor(0.25), align=True)
# labeled_row.label(text='Output Dir:')
# prop_btn_row = labeled_row.row(align=True)
# prop_btn_row.label(text=str(render_output))
# props = prop_btn_row.operator(RAAS_OT_explore_file_path.bl_idname,
# text='', icon='DISK_DRIVE')
# props.path = str(render_output.parent)
#########################################################################
# def raas_do_override_output_path_updated(scene, context):
# """Set the override paths to the default, if not yet set."""
# # Only set a default when enabling the override.
# if not scene.raas_do_override_output_path:
# return
# # Don't overwrite existing setting.
# if scene.raas_override_output_path:
# return
# scene.raas_override_output_path = raas_pref.preferences().raas_job_output_path
# log.info('Setting Override Output Path to %s', scene.raas_override_output_path)
#################################################
...
@@ -3134,37 +1968,12 @@ def register():
    scene = bpy.types.Scene
# scene.raas_do_override_output_path = BoolProperty(
# name='Override Output Path for this Job',
# description='When enabled, allows you to specify a non-default Output path '
# 'for this particular job',
# default=False,
# update=raas_do_override_output_path_updated
# )
# scene.raas_override_output_path = StringProperty(
# name='Override Output Path',
# description='Path where to store output files, should be accessible for Workers',
# subtype='DIR_PATH',
# default='')
################UserAndLimitationManagement#################
#scene.raas_resource_usage_ext = bpy.props.CollectionProperty(type=RAAS_PG_ResourceUsageExt)
#scene.raas_resource_usage_ext_index = bpy.props.IntProperty()
#!!!!!!!!!!need to be deleted
################ClusterInformation#################
#scene.raas_cluster_node_usage_ext = bpy.props.CollectionProperty(type=RAAS_PG_ClusterNodeUsageExt)
#scene.raas_cluster_node_usage_ext_index = bpy.props.IntProperty()
#scene.raas_cluster_info_ext = bpy.props.CollectionProperty(type=RAAS_PG_ClusterInfoExt)
#scene.raas_cluster_info_ext_index = bpy.props.IntProperty()
#!!!!!!!!!!need to be deleted
    ################JobManagement#################
    scene.raas_list_jobs = bpy.props.CollectionProperty(type=RAAS_PG_SubmittedJobInfoExt, options={'SKIP_SAVE'})
    scene.raas_list_jobs_index = bpy.props.IntProperty(default=-1, options={'SKIP_SAVE'})
    scene.raas_blender_job_info_new = bpy.props.PointerProperty(type=RAAS_PG_BlenderJobInfo, options={'SKIP_SAVE'})
    scene.raas_submitted_job_info_ext_new = bpy.props.PointerProperty(type=RAAS_PG_SubmittedJobInfoExt, options={'SKIP_SAVE'})
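    # register() attaches these collections and pointers directly to bpy.types.Scene;
    # the commented-out loop in unregister() further down shows the matching delattr
    # cleanup. The sketch below mirrors that teardown for the property names registered
    # here; it is an illustration, not code from the add-on.
    def unregister_scene_props():
        for name in ('raas_list_jobs',
                     'raas_list_jobs_index',
                     'raas_blender_job_info_new',
                     'raas_submitted_job_info_ext_new'):
            try:
                delattr(bpy.types.Scene, name)
            except AttributeError:
                # Property was never registered or is already gone.
                pass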
#!!!!!!!!!!need to be deleted
    #################################
    bpy.types.WindowManager.raas_status = EnumProperty(
...
@@ -3209,22 +2018,6 @@ def unregister():
    except RuntimeError:
        log.warning('Unable to unregister class %r, probably already unregistered', cls)
    # for name in ('raas_render_fchunk_size',
    #              'raas_render_chunk_sample_cap',
    #              'raas_render_frame_range',
    #              'raas_render_job_type',
    #              'raas_start_paused',
    #              'raas_render_job_priority',
    #              'raas_do_override_output_path',
    #              'raas_override_output_path'):
    # for name in ('raas_do_override_output_path',
    #              'raas_override_output_path'):
    #     try:
    #         delattr(bpy.types.Scene, name)
    #     except AttributeError:
    #         pass
    try:
        del bpy.types.WindowManager.raas_status
    except AttributeError:
...
img/addon_authorization.png (binary image replaced: 151 KiB → 159 KiB)