Problem with prototype inheritance from res.partner in Odoo 8

I created a new class by prototype inheritance from res.partner and added fields and methods, but this affects res.partner creation from the UI. I get this error:
Traceback (most recent call last):
File "/vagrant/odoo/openerp/http.py", line 546, in _handle_exception
return super(JsonRequest, self)._handle_exception(exception)
File "/vagrant/odoo/openerp/http.py", line 583, in dispatch
result = self._call_function(**self.params)
File "/vagrant/odoo/openerp/http.py", line 319, in _call_function
return checked_call(self.db, *args, **kwargs)
File "/vagrant/odoo/openerp/service/model.py", line 118, in wrapper
return f(dbname, *args, **kwargs)
File "/vagrant/odoo/openerp/http.py", line 316, in checked_call
return self.endpoint(*a, **kw)
File "/vagrant/odoo/openerp/http.py", line 812, in __call__
return self.method(*args, **kw)
File "/vagrant/odoo/openerp/http.py", line 412, in response_wrap
response = f(*args, **kw)
File "/vagrant/odoo/addons/web/controllers/main.py", line 944, in call_kw
return self._call_kw(model, method, args, kwargs)
File "/vagrant/odoo/addons/web/controllers/main.py", line 936, in _call_kw
return getattr(request.registry.get(model), method)(request.cr, request.uid, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 268, in wrapper
return old_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 372, in old_api
result = method(recs, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 266, in wrapper
return new_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/addons/base/res/res_partner.py", line 582, in create
return super(res_partner, self).create(vals)
File "/vagrant/odoo/openerp/api.py", line 266, in wrapper
return new_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 508, in new_api
result = method(self._model, cr, uid, *args, **old_kwargs)
File "/vagrant/odoo/addons/mail/mail_thread.py", line 381, in create
thread_id = super(mail_thread, self).create(cr, uid, values, context=context)
File "/vagrant/odoo/openerp/api.py", line 268, in wrapper
return old_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 372, in old_api
result = method(recs, *args, **kwargs)
File "/vagrant/odoo/openerp/models.py", line 4101, in create
record = self.browse(self._create(old_vals))
File "/vagrant/odoo/openerp/api.py", line 266, in wrapper
return new_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 508, in new_api
result = method(self._model, cr, uid, *args, **old_kwargs)
File "/vagrant/odoo/openerp/models.py", line 4313, in _create
cr, user, ids, fields2, context)
File "/vagrant/odoo/openerp/api.py", line 268, in wrapper
return old_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/models.py", line 4451, in _store_set_values
result = column.get(cr, self, ids, f, SUPERUSER_ID, context=context)
File "/vagrant/odoo/openerp/osv/fields.py", line 1462, in get
result = self._fnct(obj, cr, uid, ids, name, self._arg, context)
File "/vagrant/odoo/openerp/addons/base/res/res_partner.py", line 223, in <lambda>
_commercial_partner_id = lambda self, *args, **kwargs: self._commercial_partner_compute(*args, **kwargs)
File "/vagrant/odoo/openerp/api.py", line 268, in wrapper
return old_api(self, *args, **kwargs)
File "/vagrant/odoo/openerp/addons/base/res/res_partner.py", line 210, in _commercial_partner_compute
while not current_partner.is_company and current_partner.parent_id:
File "/vagrant/odoo/openerp/fields.py", line 841, in __get__
return record._cache[self]
File "/vagrant/odoo/openerp/models.py", line 6057, in __getitem__
return value.get() if isinstance(value, SpecialValue) else value
File "/vagrant/odoo/openerp/fields.py", line 56, in get
raise self.exception
MissingError: ('MissingError', u'One of the documents you are trying to access has been deleted, please try again after refreshing.')

The error happens when Odoo calls the fields.function compute methods and tries to store the resulting values directly in the database.
When Odoo initializes the fields and processes the store parameter, it uses the key defined in the store dictionary as the model name (res.partner), while the trigger list is registered under the new class's model name (new.partner).
The failure occurs exactly when it tries to read a column value of a new.partner record using res.partner ids. If you look at the res.partner class, you will see that the model name res.partner is hardcoded in the store attribute of some fields.
To avoid that issue you can redefine the commercial_partner_id field (and the other affected function fields) and set the store model to new.partner.
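For reference, this is roughly how the base class keys its store triggers on the hardcoded model name 'res.partner' (a paraphrased sketch of Odoo 8's res_partner.py, not the verbatim source):
# openerp/addons/base/res/res_partner.py (Odoo 8, paraphrased sketch)
_commercial_partner_store_triggers = {
    'res.partner': (lambda self, cr, uid, ids, context=None: self.search(
        cr, uid, [('id', 'child_of', ids)], context=dict(active_test=False)),
        ['parent_id', 'is_company'], 10)
}
Because the key is hardcoded, the trigger still points at res.partner when the field is inherited into new.partner, which is what the redefinition below fixes.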
Example: redefine the affected res.partner fields with the store model set to new.partner:
from openerp import api, tools
from openerp.osv import fields, osv


class NewPartner(osv.Model):
    _name = "new.partner"
    _description = "new.partner"
    _inherit = ['res.partner', 'mail.thread', 'ir.needaction_mixin']
    _rec_name = "name"

    @api.multi
    def _get_image(self, name, args):
        return dict((p.id, tools.image_get_resized_images(p.image)) for p in self)

    @api.one
    def _set_image(self, name, value, args):
        return self.write({'image': tools.image_resize_image_big(value)})

    _commercial_partner_id = lambda self, *args, **kwargs: self._commercial_partner_compute(*args, **kwargs)
    _display_name = lambda self, *args, **kwargs: self._display_name_compute(*args, **kwargs)

    # store triggers keyed on 'new.partner' instead of the hardcoded 'res.partner'
    _commercial_partner_store_triggers = {
        'new.partner': (lambda self, cr, uid, ids, context=None: self.search(
            cr, uid, [('id', 'child_of', ids)], context=dict(active_test=False)),
            ['parent_id', 'is_company'], 10)
    }
    _display_name_store_triggers = {
        'new.partner': (lambda self, cr, uid, ids, context=None: self.search(
            cr, uid, [('id', 'child_of', ids)], context=dict(active_test=False)),
            ['parent_id', 'is_company', 'name'], 10)
    }

    _columns = {
        'display_name': fields.function(_display_name, type='char', string='Name',
                                        store=_display_name_store_triggers, select=True),
        'parent_id': fields.many2one('new.partner', 'Related Company', select=True),
        'child_ids': fields.one2many('new.partner', 'parent_id', 'Contacts',
                                     domain=[('active', '=', True)]),
        'image_medium': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'new.partner': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of this contact. It is automatically "
                 "resized as a 128x128px image, with aspect ratio preserved. "
                 "Use this field in form views or some kanban views."),
        'image_small': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Small-sized image", type="binary", multi="_get_image",
            store={
                'new.partner': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of this contact. It is automatically "
                 "resized as a 64x64px image, with aspect ratio preserved. "
                 "Use this field anywhere a small image is required."),
        'commercial_partner_id': fields.function(
            _commercial_partner_id, type='many2one', relation='new.partner',
            string='Commercial Entity', store=_commercial_partner_store_triggers),
    }

Related

How to convert grpc.ServerInterceptor to grpc.aio.ServerInterceptor

I am trying to implement an async ServerInterceptor (grpc.aio.ServerInterceptor). My current synchronous ServerInterceptor looks like this: https://github.com/zhyon404/python-grpc-prometheus/blob/master/python_grpc_prometheus/prometheus_server_interceptor.py#L48. When I try to use grpc.aio.ServerInterceptor and start the server, I get the error shown below.
My server code:
import asyncio

import prometheus_client
from grpc import aio
from grpc_opentracing import open_tracing_server_interceptor
from grpc_opentracing.grpcext import intercept_server

from prometheus_server_interceptor import PromServerInterceptor


class MyServicer():
    async def _start_async_server(self, service, tracer=None, grpc_port=8083, http_port=8080):
        tracing_interceptor = open_tracing_server_interceptor(tracer)
        server = aio.server(interceptors=(PromServerInterceptor(),))
        server = intercept_server(server, tracing_interceptor)
        my_service_pb2_grpc.add_MyServicer_to_server(service, server)
        server.add_insecure_port("[::]:" + str(grpc_port))
        await server.start()
        logger.info("Started prometheus server at port %s", http_port)
        prometheus_client.start_http_server(http_port)
        await server.wait_for_termination()

    def async_serve(self, service, tracer=None, grpc_port=8083, http_port=8080):
        loop = asyncio.get_event_loop()
        loop.create_task(self._start_async_server(service, tracer, grpc_port, http_port))
        loop.run_forever()
These are the library versions I am using:
grpcio==1.32.0
grpcio-opentracing==1.1.4
I see the following error:
File "src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi", line 646, in grpc._cython.cygrpc._handle_exceptions
File "src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi", line 745, in _handle_rpc
File "src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi", line 511, in _handle_unary_unary_rpc
File "src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi", line 368, in _finish_handler_with_unary_response
File "prometheus_server_interceptor.py", line 93, in new_behavior
rsp = await behavior(request_or_iterator, service_context)
File "/anaconda3/lib/python3.7/site-packages/grpc_opentracing/grpcext/_interceptor.py", line 272, in adaptation
_UnaryServerInfo(self._method), handler)
File "/anaconda3/lib/python3.7/site-packages/grpc_opentracing/_server.py", line 145, in intercept_unary
timeout=servicer_context.time_remaining(),
AttributeError: 'grpc._cython.cygrpc._ServicerContext' object has no attribute 'time_remaining
This is my PromServerInterceptor implementation:
from grpc import aio
import grpc
from timeit import default_timer

from python_grpc_prometheus.server_metrics import (SERVER_HANDLED_LATENCY_SECONDS,
                                                   SERVER_HANDLED_COUNTER,
                                                   SERVER_STARTED_COUNTER,
                                                   SERVER_MSG_RECEIVED_TOTAL,
                                                   SERVER_MSG_SENT_TOTAL)
from python_grpc_prometheus.util import type_from_method
from python_grpc_prometheus.util import code_to_string


def _wrap_rpc_behavior(handler, fn):
    if handler is None:
        return None
    if handler.request_streaming and handler.response_streaming:
        behavior_fn = handler.stream_stream
        handler_factory = grpc.stream_stream_rpc_method_handler
    elif handler.request_streaming and not handler.response_streaming:
        behavior_fn = handler.stream_unary
        handler_factory = grpc.stream_unary_rpc_method_handler
    elif not handler.request_streaming and handler.response_streaming:
        behavior_fn = handler.unary_stream
        handler_factory = grpc.unary_stream_rpc_method_handler
    else:
        behavior_fn = handler.unary_unary
        handler_factory = grpc.unary_unary_rpc_method_handler
    return handler_factory(fn(behavior_fn,
                              handler.request_streaming,
                              handler.response_streaming),
                           request_deserializer=handler.request_deserializer,
                           response_serializer=handler.response_serializer)


def split_call_details(handler_call_details, minimum_grpc_method_path_items=3):
    parts = handler_call_details.method.split("/")
    if len(parts) < minimum_grpc_method_path_items:
        return '', '', False
    grpc_service, grpc_method = parts[1:minimum_grpc_method_path_items]
    return grpc_service, grpc_method, True


class PromServerInterceptor(aio.ServerInterceptor):
    async def intercept_service(self, continuation, handler_call_details):
        handler = await continuation(handler_call_details)
        if handler is None:
            return handler
        # only support unary
        if handler.request_streaming or handler.response_streaming:
            return handler
        grpc_service, grpc_method, ok = split_call_details(handler_call_details)
        if not ok:
            return continuation(handler_call_details)
        grpc_type = type_from_method(handler.request_streaming, handler.response_streaming)
        SERVER_STARTED_COUNTER.labels(
            grpc_type=grpc_type,
            grpc_service=grpc_service,
            grpc_method=grpc_method).inc()

        def latency_wrapper(behavior, request_streaming, response_streaming):
            async def new_behavior(request_or_iterator, service_context):
                start = default_timer()
                SERVER_MSG_RECEIVED_TOTAL.labels(
                    grpc_type=grpc_type,
                    grpc_service=grpc_service,
                    grpc_method=grpc_method
                ).inc()
                # default
                code = code_to_string(grpc.StatusCode.UNKNOWN)
                try:
                    rsp = await behavior(request_or_iterator, service_context)
                    if service_context._state.code is None:
                        code = code_to_string(grpc.StatusCode.OK)
                    else:
                        code = code_to_string(service_context._state.code)
                    SERVER_MSG_SENT_TOTAL.labels(
                        grpc_type=grpc_type,
                        grpc_service=grpc_service,
                        grpc_method=grpc_method
                    ).inc()
                    return rsp
                except grpc.RpcError as e:
                    if isinstance(e, grpc.Call):
                        code = code_to_string(e.code())
                    raise e
                finally:
                    SERVER_HANDLED_COUNTER.labels(
                        grpc_type=grpc_type,
                        grpc_service=grpc_service,
                        grpc_method=grpc_method,
                        grpc_code=code
                    ).inc()
                    SERVER_HANDLED_LATENCY_SECONDS.labels(
                        grpc_type=grpc_type,
                        grpc_service=grpc_service,
                        grpc_method=grpc_method).observe(max(default_timer() - start, 0))
            return new_behavior
        return _wrap_rpc_behavior(handler, latency_wrapper)
An interceptor based on grpc.aio can look like this:
class RequestIdInterceptor(grpc.aio.ServerInterceptor):
    async def intercept_service(self, continuation, handler_call_details):
        for (header, value) in handler_call_details.invocation_metadata:
            if header == "request_id":
                ...
                break
        return await continuation(handler_call_details)
To convert an interceptor, you need to make intercept_service an async def and await the continuation.
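For completeness, here is a minimal sketch (not from the original answer) of registering such an interceptor with the asyncio server; the commented servicer registration line is a placeholder for your generated pb2 module:
import asyncio
from grpc import aio

async def serve():
    # interceptors are passed to aio.server() the same way as in the sync API
    server = aio.server(interceptors=(RequestIdInterceptor(),))
    # my_service_pb2_grpc.add_MyServicer_to_server(MyServicer(), server)  # placeholder
    server.add_insecure_port("[::]:8083")
    await server.start()
    await server.wait_for_termination()

if __name__ == "__main__":
    asyncio.run(serve())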

Make an ArrayList whose elements contain an ArrayList too

I have a problem creating an ArrayList of objects whose attributes contain an ArrayList too. The data model looks like this:
data class DataArrayInArray02(
    val no: Int? = null,
    val dataArray: ArrayList<Int>
)
I want to get DataArrayInArray02 data like this. This is my code:
fun main() {
    val dataArrayInArray = ArrayList<DataArrayInArray02>()
    val dataChildrenArray = ArrayList<Int>()
    for (i in 0..3) {
        val data = (0..10).random()
        for (j in 0..data) {
            val d = (1..1000).random()
            dataChildrenArray.add(d)
        }
        dataArrayInArray.add(DataArrayInArray02(i + 1, dataChildrenArray))
        println("ID : ${dataArrayInArray[i].no}, Data : ${dataArrayInArray[i].dataArray}")
        dataChildrenArray.clear()
    }
}
When I run this code, I get the result shown in the picture above. I then iterate over dataArrayInArray with a for loop like this:
for (j in 0 until dataArrayInArray.size) {
    println("ID : ${dataArrayInArray[j].no}, Data : ${dataArrayInArray[j].dataArray}")
}
But then I get the wrong result. So, which part of the code is incorrect? Is it because of dataChildrenArray.clear()? If I delete that line, the result looks like this:
ID : 1, Data : [915, 565, 591, 254, 67]
ID : 2, Data : [915, 565, 591, 254, 67, 258, 57, 767, 866, 986, 558, 187, 976]
where ID 2 should only display [258, 57, 767, 866, 986, 558, 187, 976].
How can I solve this problem?
You're passing the same ArrayList<Int> instance to every DataArrayInArray02 in the top-level list. To fix your code, move the line
val dataChildrenArray = ArrayList<Int>()
inside your for loop so that a new list is created for each child.
If you're OK with using List instead of ArrayList, you could simplify this:
val dataArrayInArray = (1..4).map { i ->
    val innerListSize = (0..10).random()
    val innerList = (0..innerListSize).map { (1..1000).random() }
    DataArrayInArray02(i, innerList)
}

Handle NetworkException in Spring Cloud Stream with Kafka

I have the following listener/producer in Spring Cloud Stream:
@StreamListener(target = MultipleProcessor.DOTCONN_INPUT, condition = "headers['kafka_receivedTopic']=='dotconnectorissues'")
public void inputDot(Message<DotConnectorIssue> messageIn) {
    DotConnectorIssue data = messageIn.getPayload();
    ObjectMapper mapper = new ObjectMapper();
    DotConnectorUpdateDto dataMapped = new DotConnectorUpdateDto(data);
    if (dataMapped.getPlantCode().equals(plantCode)) {
        log.info("incoming dotConnectorIssue " + data);
        try {
            Message<String> messageOut = MessageBuilder
                    .withPayload(mapper.writeValueAsString(dataMapped))
                    .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON)
                    .setHeader("type", "dotconnectorissue")
                    .build();
            boolean send = ehProcessor.outputAndon().send(messageOut, 15000L);
            log.info("sent message: " + send);
            if (!send) messagePool.getPool().add(messageOut);
        } catch (JsonProcessingException e) {
            log.error("error during creating json", e);
        }
    }
}
This piece of code works, but sometimes the message fails to be sent because of the following error:
[kafka-producer-network-thread | producer-2] ERROR o.s.k.s.LoggingProducerListener.onError -
Exception thrown when sending a message with key='null' and payload='{123, 34, 116, 121, 112, 101, 34, 58, 34, 85, 80, 68, 65, 84, 69, 95, 68, 79, 84, 67, 79, 78, 78, 69...' to topic andon:
org.apache.kafka.common.errors.NetworkException: The server disconnected before a response was received.
even when the send variable is true.
How can I handle the NetworkException error in Spring Cloud Stream?
Set the producer sync property to true and catch the exception, or, to get the error asynchronously, set the producer property errorChannelEnabled.
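As a rough sketch (the binding name outputAndon is assumed from the code above; adjust it to your actual output channel), the corresponding producer properties could look like this:
# Assumed binding name 'outputAndon'; replace with your actual output channel.
# Option 1: make the Kafka producer synchronous so send() fails fast and can be caught:
spring.cloud.stream.kafka.bindings.outputAndon.producer.sync=true
# Option 2: receive send failures asynchronously on the binding's error channel:
spring.cloud.stream.bindings.outputAndon.producer.errorChannelEnabled=true
With errorChannelEnabled, failed sends typically arrive on the destination's error channel (andon.errors here), which you can consume with a @ServiceActivator.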

BuildBot: unable to inject extended event handler in master.cfg

Buildbot version: 1.3.0
Twisted version: 18.4.0
My master.cfg:
# -*- python -*-
# ex: set filetype=python:
from buildbot.plugins import *
from config import *
import os
from hooks import CustomHandler
# ip related imports
import socket
import fcntl
import struct
def get_ip_address(ifname):
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack('256s', bytes(ifname[:15], 'utf-8'))
    )[20:24])
#--------------------------------------------------------------
# CONSTANTS
localhost = get_ip_address(NETWORK_INTERFACE)
HOME = os.environ['HOME']
#--------------------------------------------------------------
c = BuildmasterConfig = {}
####### WORKERS
c['workers'] = [worker.Worker("example-worker", "pass")]
c['protocols'] = {'pb': {'port': 9989}}
c['www'] = dict(port=8010,plugins=dict(waterfall_view={}, console_view={}, grid_view={}))
####### CHANGESOURCES
c['www']['change_hook_dialects'] = {}
c['www']['change_hook_dialects']['bitbucketcloud'] = {
    'class': CustomHandler
}
####### SCHEDULERS
c['schedulers'] = []
c['schedulers'].append(schedulers.SingleBranchScheduler(
    name="schedulerQueryBuilder",
    change_filter=util.ChangeFilter(repository_re='.*/querybuilder',
                                    branch_re='.*/build_bot_testing',
                                    category='push'),
    treeStableTimer=None,
    builderNames=["BuilderOfQueryBuilder"]
))
c['schedulers'].append(schedulers.ForceScheduler(
    name="build",
    builderNames=["BuilderOfQueryBuilder"]))
####### BUILDERS
BitBucketFactory = util.BuildFactory()
BitBucketFactory.addStep(steps.Git(
    repourl='git@host:A/B.git',
    branch='build_bot_testing',
    retryFetch=True,
    clobberOnFailure=True,
    mode='incremental'))
BitBucketFactory.addStep(steps.PyLint(
    command=['pylint', '--py3k', '--ignore ln2sql',
             '--disable C,import-error,import-error,too-few-public-methods,undefined-variable',
             'swagger_server'],
    haltOnFailure=True))
BitBucketFactory.addStep(steps.Test(
    command=["pytest", "swagger_server"],
    haltOnFailure=False))
BitBucketFactory.addStep(steps.ShellSequence(commands=[
    util.ShellArg(command=['zip', '-r', '/home/rajat/buildbot/worker/BuilderOfQueryBuilder/build.zip',
                           '/home/rajat/buildbot/worker/BuilderOfQueryBuilder/build'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
    util.ShellArg(command=['scp', '-i', '/home/rajat/.ssh/id_rsa.pub', '-rp',
                           '/home/rajat/buildbot/worker/BuilderOfQueryBuilder/build.zip',
                           'rajat@192.168.0.21:/home/rajat/buildbot/querybuilder/build.zip'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
    util.ShellArg(command=['ssh', '-i', '/home/rajat/.ssh/id_rsa.pub', 'rajat@192.168.0.21',
                           'rm', '-rf', '/home/rajat/buildbot/querybuilder/build'],
                  logfile='ShellSequenceLog', flunkOnFailure=True),
    util.ShellArg(command=['ssh', '-i', '/home/rajat/.ssh/id_rsa.pub', 'rajat@192.168.0.21',
                           'unzip', '-o', '/home/rajat/buildbot/querybuilder/build.zip',
                           '-d', '/home/rajat/buildbot/querybuilder/'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
    util.ShellArg(command=['ssh', '-i', '/home/rajat/.ssh/id_rsa.pub', 'rajat@192.168.0.21',
                           '/home/rajat/anaconda3/envs/querybuilder/bin/pip',
                           'install', '--upgrade', 'setuptools'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
    util.ShellArg(command=['ssh', '-i', '/home/rajat/.ssh/id_rsa.pub', 'rajat@192.168.0.21',
                           '/home/rajat/anaconda3/envs/querybuilder/bin/pip', 'install', '-r',
                           '/home/rajat/buildbot/querybuilder/build/requirements.txt'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
    util.ShellArg(command=['ssh', '-f', '-i', '/home/rajat/.ssh/id_rsa.pub', 'rajat@192.168.0.21',
                           'cd', '/home/rajat/buildbot/querybuilder/build', '&&', 'nohup',
                           '/home/rajat/anaconda3/envs/querybuilder/bin/python', '-m',
                           'swagger_server', '&>',
                           '/home/rajat/logs/log.txt', '&'],
                  logfile='ShellSequenceLog', haltOnFailure=True),
]))
# BitBucketFactory.workdir = 'customDirName'
# Default is build in path /worker/<BuilderName>/build/<fetched files>
c['builders'] = []
c['builders'].append(util.BuilderConfig(
    name="BuilderOfQueryBuilder",
    workernames=["example-worker"],
    factory=BitBucketFactory,
    properties={'owner': ['foo@leadics.com']}))
####### BUILDBOT SERVICES
c['services'] = []
c['services'].append(reporters.MailNotifier(fromaddr="buildbot@foo.com",
                                            sendToInterestedUsers=True,
                                            # extraRecipients=["foo@bar.com"],
                                            useTls=True, relayhost="smtp.gmail.com",
                                            smtpPort=587, smtpUser="buildbot@company.com",
                                            smtpPassword="password"))
####### PROJECT IDENTITY
c['title'] = "BitBucket_BuildBot"
c['titleURL'] = "https://buildbot.github.io/hello-world/"
c['buildbotURL'] = "http://"+localhost+":8010/"
####### DB URL
c['db'] = {
'db_url' : "sqlite:///state.sqlite",
}
hooks.py
from buildbot.www.hooks.bitbucketcloud import (BitbucketCloudEventHandler,
                                               GIT_BRANCH_REF, GIT_TAG_REF)


class CustomHandler(BitbucketCloudEventHandler):

    def handle_repo_push(self, payload):
        changes = []
        # project = payload['repository']['name']
        repo_url = payload['repository']['links']['self']['href']
        web_url = payload['repository']['links']['html']['href']
        for payload_change in payload['push']['changes']:
            if payload_change['new']:
                age = 'new'
                category = 'push'
            else:  # when new is null the ref is deleted
                age = 'old'
                category = 'ref-deleted'
            commit_hash = payload_change[age]['target']['hash']
            if payload_change[age]['type'] == 'branch':
                branch = GIT_BRANCH_REF.format(payload_change[age]['name'])
            elif payload_change[age]['type'] == 'tag':
                branch = GIT_TAG_REF.format(payload_change[age]['name'])
            change = {
                'revision': commit_hash,
                'revlink': '{}/commits/{}'.format(web_url, commit_hash),
                'repository': repo_url,
                'author': '{} <{}>'.format(payload['actor']['display_name'],
                                           payload['actor']['username']),
                'comments': 'Bitbucket Cloud commit {}'.format(commit_hash),
                'branch': branch,
                # 'project': project,
                'category': category
            }
            if callable(self._codebase):
                change['codebase'] = self._codebase(payload)
            elif self._codebase is not None:
                change['codebase'] = self._codebase
            changes.append(change)
        return (changes, payload['repository']['scm'])
but it still dispatches to the default BitbucketCloudEventHandler from buildbot.www.hooks.bitbucketcloud, and I get the following error:
2018-07-16 17:02:05+0530 [_GenericHTTPChannelProtocol,0,127.0.0.1] adding changes from web hook
Traceback (most recent call last):
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/buildbot/www/change_hook.py", line 107, in getAndSubmitChanges
changes, src = yield self.getChanges(request)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/twisted/internet/defer.py", line 1532, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
--- <exception caught here> ---
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/twisted/internet/defer.py", line 1386, in _inlineCallbacks
result = g.send(result)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/buildbot/www/change_hook.py", line 168, in getChanges
changes, src = yield handler.getChanges(request)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/buildbot/www/hooks/bitbucketcloud.py", line 165, in getChanges
return self.process(request)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/buildbot/www/hooks/bitbucketcloud.py", line 56, in process
return handler(payload)
File "/home/rajat/anaconda3/envs/buildbot/lib/python3.6/site-packages/buildbot/www/hooks/bitbucketcloud.py", line 75, in handle_repo_push
project = payload['repository']['project']['name']
builtins.KeyError: 'project'
What am I doing wrong? I have to override the class because Bitbucket changed their hook JSON and the project key was removed.
Any help will be appreciated.

PyQt Get the result from a popup window

I want to open a popup window in a PyQt interface using this class:
class MyPopup(QWidget):
    def __init__(self):
        QWidget.__init__(self)
        self.initUI()
        self.res = 0

    def initUI(self):
        self.btn = QtGui.QPushButton('Continue', self)
        self.btn.move(20, 160)
        self.btn.clicked.connect(self.showDialogcontinue)
        self.btn = QtGui.QPushButton('Quit', self)
        self.btn.move(180, 160)
        self.btn.clicked.connect(self.showDialogstop)
        self.setGeometry(600, 600, 290, 150)
        self.setWindowTitle('EUT Setup')
        self.show()

    def showDialogcontinue(self):
        self.close()
        self.res = 1

    def showDialogstop(self):
        self.close()
        self.res = 0
So when I use it in a push-button handler:
self.w = MyPopup()
self.w.setGeometry(QRect(100, 100, 400, 200))
self.w.show()
if self.w.res == 1:
    print "start"
    self.__thread.start()
else:
    print "stop"
I can't get the result in order to decide whether or not to launch my thread (self.__thread). What's wrong? Could you help?
The problem is that showing a widget does not block the execution of code. So the if check is reached long before any button in the widget is clicked.
To solve this you can change the parent class to a QDialog, and show it with exec_() which will block until the dialog is closed.
Also, set self.res = 0 before self.initUI(): anything after self.initUI() now runs only after the dialog has closed and would reset res to 0 again.
class MyPopup(QtGui.QDialog):
    def __init__(self, parent=None):
        super(MyPopup, self).__init__(parent)
        self.res = 0
        self.initUI()

    def initUI(self):
        self.btn = QtGui.QPushButton('Continue', self)
        self.btn.move(20, 160)
        self.btn.clicked.connect(self.showDialogcontinue)
        self.btn = QtGui.QPushButton('Quit', self)
        self.btn.move(180, 160)
        self.btn.clicked.connect(self.showDialogstop)
        self.setGeometry(600, 600, 290, 150)
        self.setWindowTitle('EUT Setup')
        self.exec_()

    def showDialogcontinue(self):
        self.res = 1
        self.close()

    def showDialogstop(self):
        self.res = 0
        self.close()
If all you need is a single true/false value back from the dialog, you can do it more easily using the accept/reject slots of QDialog:
class MyPopup(QtGui.QDialog):
    def __init__(self):
        super(MyPopup, self).__init__()
        self.initUI()

    def initUI(self):
        self.btn = QtGui.QPushButton('Continue', self)
        self.btn.move(20, 160)
        self.btn.clicked.connect(self.accept)
        self.btn = QtGui.QPushButton('Quit', self)
        self.btn.move(180, 160)
        self.btn.clicked.connect(self.reject)
        self.setGeometry(600, 600, 290, 150)
        self.setWindowTitle('EUT Setup')

w = MyPopup()
if w.exec_():
    print("Start thread")
else:
    print("Stop")

Resources