input
stringlengths
0
27.7k
created_at
stringlengths
29
29
i'm using next-auth and so far i've managed to get the accesstoken into both my server-session and client session. but i'm trying to get my refreshtoken into my server-session, but the only way i manage to do it is if i include it in the session callback, but then it's also added in the client-session, and i don't want that... so i don't really understand why it don't work. this is what user looks like then logging it in the jwt callback on signin user { accesstoken: 'eyjhbgc...', refreshtoken: 'eyjhbgc...', name: 'company' } so it's still there, but seem to dissapear before it reaches the server session: server session { user: { name: 'company', email: undefined, image: undefined }, accesstoken: 'eyjhbgc...', organisation: { name: 'company' } } my route.js file for next-auth: import nextauth, { nextauthoptions } from next-auth ; import credentialsprovider from next-auth/providers/credentials ; import { isjwtexpired } from ../../../../utils/utils ; const backendbaseurl = ${process.env.nextpublicbackendbaseurl}; const handlejwtexpired = async (token) = { console.log( token expired, refreshing ); const backendbaseurl = ${process.env.nextpublicbackendbaseurl}; const payload = json.stringify({ refresh: token.refreshtoken, }); const response = await fetch(${backendbaseurl}/api/auth/token/refresh/, { method: post , headers: { content-type : application/json , }, body: payload, }); if (response.ok) { console.log( token refreshed ); // extract the returned token from the drf backend const responsedata = await response.json(); const { access: customaccesstoken, accessexpiration: customaccessexpiration, } = responsedata; // calculate 'iat' and 'exp' based on the current time and expiration time const now = math.floor(date.now() / 1000); // current time in unix time (seconds) const exptime = new date(customaccessexpiration).gettime() / 1000; // convert expiration time to unix time token = { ...token, accesstoken: customaccesstoken, iat: now, exp: exptime, }; } else { token = 
{ ...token, accesstoken: , }; console.log( error in response , response.status); } return token; }; export const authoptions = { pages: { signin: /auth/signin , // custom sign-in page path }, providers: [ credentialsprovider({ name: credentials , credentials: { email: { label: email , type: text , placeholder: user[USER].com , }, password: { label: password , type: password }, }, async authorize(credentials, req) { // make request to baseurl/api/auth/login/ with email and password body const url = ${backendbaseurl}/api/token/; const response = await fetch(url, { method: post , headers: { content-type : application/json , }, body: json.stringify(credentials), }); if (!response.ok) { return null; } const responsedata = await response.json(); const { access: customaccesstoken, refresh: customrefreshtoken, organisation: { name }, } = responsedata; if (customaccesstoken) { const responsebody = { accesstoken: customaccesstoken, refreshtoken: customrefreshtoken, name: name, }; return responsebody; } else { return null; } }, }), ], secret: process.env.nextauthsecret, callbacks: { async jwt({ token, account, trigger, session, profile, user }) { // update server session with new data if (account && user) { console.log('user', user) return { ...token, accesstoken: user.accesstoken, refreshtoken: user.refreshtoken, accesstokenexpires: date.now() + user.expiresin * 1000, // assuming you have expiresin value }; } // if (trigger === update ) { // console.log('updating server session...') // return { ...token, ...session.organisation }; // } if ( typeof token.accesstoken === string && isjwtexpired(token.accesstoken) ) { token = await handlejwtexpired(token); } return token; }, async session({ session, token: jwttoken, user, trigger }) { const token = jwttoken; session.accesstoken = token.accesstoken; // session.refreshtoken = token.refreshtoken; // if (!session.user) { // session.organisation = { name: }; // } session.organisation = session.organisation || { name: }; 
session.organisation.name = token.name || null; // session.user.lastname = token.last_name || null; // session.user.location = token.location; // session.user.username = token.username; return session; }, }, }; const handler = nextauth(authoptions); export { handler as get, handler as post };
2024-02-29 07:07:52.127000000
given json of: { animals : [ { dog : { name : buster , breed : poodle , collartag : [HASH] } }, { snake : { name : sammy , species : copperhead } }, { dog : { name : rex , breed : mixed , collartag : [HASH] } } ] } i need to transform the array of mixed elements to an array that only includes dogs and the last 4 digits of the collartag . what is the best way to do this, filter, reduce, something else? and how would it be done. i am not the strongest at typescript and get hung up on some of the simple things.
2024-02-08 20:32:26.520000000
i am trying to put settapaction of my complication to call the activity, but it"s not working whenever i tap it does not do anything just a ripple effect, my code is below. class customcomplication : suspendingcomplicationdatasourceservice() { // retrieves your data, in this case, we grab an incrementing number from datastore. val prefs = getsharedpreferences( myprefsname , modeprivate) val tempf = prefs.getstring( tempf , ) // no name defined is the default value. val intent = intent(this, mainactivity::class.java).apply { component = componentname(this[USER], mainactivity::class.java) } val pendingintent = pendingintent.getactivity(this, 0, intent, pendingintent.flagupdatecurrent) override fun getpreviewdata(type: complicationtype): complicationdata? { if (type != complicationtype.shorttext) { return null } return createcomplicationdata(tempf.tostring(), monday ) } override suspend fun oncomplicationrequest(request: complicationrequest): complicationdata? { log.d( babt , oncomplicationrequest() id: ${request.complicationinstanceid} ) return when (request.complicationtype) { complicationtype.short_text - shorttextcomplicationdata.builder( text = plaincomplicationtext.builder(text = tempf.tostring()).build(), contentdescription = plaincomplicationtext .builder(text = short text version of number. ).build(), ).setmonochromaticimage( monochromaticimage.builder( image = icon.createwithresource(this, r.drawable.iconforbody), ).build(), ) .settapaction(pendingintent) .build() else - { if (log.isloggable( babt , log.warn)) { log.w( babt , unexpected complication type ${request.complicationtype} ) } null } } } private fun createcomplicationdata(text: string, contentdescription: string) = shorttextcomplicationdata.builder( text = plaincomplicationtext.builder(text).build(), contentdescription = plaincomplicationtext.builder(contentdescription).build() ).build() } i tried above code but it is not working
2024-02-17 17:34:20.860000000
it is just the regularprice deducted the saleprice in woocommerce.
2024-03-01 11:19:58.727000000
in qt 6, an easier solution is to use brushes to apply colors to a qtreewidgetitem. you need to create a qbrush with a qcolor representing the color you want and apply this qbrush to your item. this will not prevent the stylesheet from modifying the items. qtreewidget tree; qtreewidgetitem item = new qtreewidgetitem(tree, qstringlist( col1 , col2 )); qbrush yellowbrush{ qcolor(0xffff00) }; qbrush bluebrush{ qcolor(0, 0, 255) }; item- setforeground(0, yellowbrush); item- setbackground(1, bluebrush); this method is used by qt to create the ui_filename.cpp file from the corresponding filename.ui form file. [LINK]>
2024-03-08 16:49:11.907000000
an anti-best practice solution, but useful for ad hoc data generation tasks. you can use some existing table and get rownumber() select rownumber() over() as i from some_table limit 10
2024-03-22 13:52:07.460000000
i have a pet peeve against the misuse of id to mean id. unfortunately, the identifiable protocol requires a hard-coded member called id for conformance; fortunately you can use a computed property for it. but doing so creates a perplexing complaint from the compiler: struct bmessage: codable, identifiable { var id: string // misspelling of id required for identifiable protocol { get { return id // ambiguous use of id } } var id: string = ... init() { id = uuid().uuidstring // ambiguous use of id } ... } to get around this, i have to put self.id all over the place. why?
2024-02-27 22:53:24.223000000
i just started learning credential authentication and based on my research, a lot of people recommended lucia-auth for credential authentication. i am following every step in their documentation, but still getting errors in my code. i have attached a screenshot of the lines of code thats flagged as error my schema //model.ts import { mongodbadapter } from [USER]-auth/adapter-mongodb ; import mongoose from mongoose ; const schema = mongoose.schema; const userschema = new schema( { id: { type: string, required: true }, username: { type: string } , password: { type: string } } as const, { id: false }, ) const sessionschema = new schema( { id: { type: string, required: true }, userid: { type: string, required: true }, expiresat: { type: date, required: true } } as const, { id: false } ) export const user = mongoose.models.user ?? mongoose.model( user , userschema); export const session = mongoose.models.session ?? mongoose.model( session , sessionschema); my login function //route.ts import link from next/link ; import { argon2id } from oslo/password ; import { cookies } from next/headers ; import { redirect } from next/navigation ; import { lucia } from @/app/auth/lucia ; import { actionresult } from @/app/lib/form ; import { user } from @/app/lib/models ; async function login(: any, formdata: formdata): promise actionresult { use server ; const username = formdata.get( username ); if ( typeof username !== string || username.length 3 || username.length 31 || !/^[a-z0-9-]+$/.test(username) ) { return { error: invalid username }; } const password = formdata.get( password ); if (typeof password !== string || password.length 6 || password.length 255) { return { error: invalid password }; } const existinguser = user.find({username}) if (!existinguser) { return { error: incorrect username or password }; } const validpassword = await new argon2id().verify(existinguser.password, password); if (!validpassword) { return { error: incorrect username or password }; } const session 
= await lucia.createsession(existinguser.id, {}); const sessioncookie = lucia.createsessioncookie(session.id); cookies().set(sessioncookie.name, sessioncookie.value, sessioncookie.attributes); return redirect( / ); } initialization of lucia //lucia.ts import { lucia } from lucia ; import { cookies } from next/headers ; import { cache } from react ; import type { databaseuser, session, user } from lucia ; import { mongodbadapter } from [USER]-auth/adapter-mongodb ; import mongoose from mongoose ; const adapter = new mongodbadapter( mongoose.connection.collection( sessions ), mongoose.connection.collection( users ) ); export const lucia = new lucia(adapter, { sessioncookie: { attributes: { secure: process.env.node_env === production } }, getuserattributes: (attributes) = { return { username: attributes.username }; } }); export const validaterequest = cache( async (): promise { user: user; session: session } | { user: null; session: null } = { const sessionid = cookies().get(lucia.sessioncookiename)?.value ?? null; if (!sessionid) { return { user: null, session: null }; } const result = await lucia.validatesession(sessionid); // next.js throws when you attempt to set cookie when rendering page try { if (result.session && result.session.fresh) { const sessioncookie = lucia.createsessioncookie(result.session.id); cookies().set(sessioncookie.name, sessioncookie.value, sessioncookie.attributes); } if (!result.session) { const sessioncookie = lucia.createblanksessioncookie(); cookies().set(sessioncookie.name, sessioncookie.value, sessioncookie.attributes); } } catch {} return result; } ); declare module lucia { interface register { lucia: typeof lucia; databaseuserattributes: { username: string }; } } this is the screenshot of the error i am getting: property 'password' does not exist on type 'query any[], any, {}, any, find '. screenshot of lines of code with the errors can someone help and possibly explain this to me so i can make sense of it for future reference.
2024-03-27 01:18:19.673000000
o(n + m sqrt(k)) , where: n is the length of arr2 ; m is the length of arr1 ; k is the maximum number in arr1 . sqrt(k) is the same as k(1/2) , so it's sublinear and therefore cannot be simplified to just k . you may find more complete answers in the <a href="[LINK] science stack exchange</a>.
2024-03-16 17:50:27.550000000
i regularly have word documents where words are repeated, for example: they they ate their dinner. - i would like if possible to remove all duplicated words from my word documents without having to go through spell check and individually remove repeated words. i appreciate i can use find and replace, but with the transcribed spoken word the list of duplicate words is very long, as you can imagine. is there a quicker way to achieve this result? i have googled it of course, but all the results i have found are have you tried find and replace? - ah. yes. no thank you, lol. thanks in advance, s
2024-02-21 21:57:00.617000000
i am building a form that takes in a user's city and province and want to use the google places autocomplete feature. no matter what, i seem to be encountering errors. i have added the following script into my code: script src= [LINK] async defer/ then i added the following function into the component where the autocomplete will be used: function initautocomplete () { autocomplete = new google.maps.places.autocomplete( document.getelementbyid( city-and-province ), { types: ['establishment'], componentrestrictions: {'country': ['ca']}, fields: ['place_id', 'geometry', 'name'] } ) } i keep getting this error: initautocomplete is not a function. if i remove the callback from the script, or if i try and create a useeffect instead of using a function, i get the error google is undefined . i am so confused. does anyone know what the issue is?
2024-03-06 22:49:59.893000000
make sure your google account language is set to english united states.
2024-02-14 22:29:43.047000000
you may try this single array-style formula in cell_f2 : =map(a2:a,c2:c,e2:e,lambda(σ,δ,λ,if(or(σ= ,λ= ),,let(x,chooserows(filter(e2:e,a2:a=σ),countif(a2:σ,σ)+1), ifs(λ= signed ,λ,iserror(x),today(),1,x-1)))))
2024-02-28 21:37:33.977000000
some of my elements are animated and they use some js to detect when being viewed. in desktop view they work fine, but when in mobile they completely disappear, except for one. the reason why they disappear is because in their animation they slide say -100% x to the left with an opacity of 0 and move back to 0 x and set their opacity to 1. i can not figure out how to override the animation. here is the code for the [USER](width: 700px) part (what it should change to when it goes into mobile view on the inspect element tab). elements that i want to animate are given the class=hidden in the html, and then referenced in the css. when something is viewed the js adds the class show and removes it when not visible. this means that the .show has the final wanted position, and the .hidden and .hidden2 are simply the position they animate from. [USER] (max-width: 700px) { .hidden:nth-child(1) { transition: 0.75s; filter: blur(5px); transform: translatez(-100%); opacity: 0; } .hidden:nth-child(2) { transition: 0.75s; filter: blur(5px); transform: translatez(-100%); opacity: 0; } .hidden:nth-child(3) { transition: 0.75s; filter: blur(5px); transform: translatez(-100%); opacity: 0; } /this is the second round of images that animate. 
/ .hidden2:nth-child(1) { transform: translatez(-100%); opacity: 0; filter: blur(5px); transition: 1s; } .hidden2:nth-child(2) { transform: translatez(-100%); opacity: 0; filter: blur(5px); transition: 1s; transition-delay: 150ms; } .hidden2:nth-child(3) { transform: translatez(-100%); opacity: 0; filter: blur(5px); transition: 1s; transition-delay: 250ms;-100 .show:nth-child(1) { opacity: 1; filter: blur(0px); transform: translatex(0); transform: translatez(0); transform: translatey(0); } .show:nth-child(2) { opacity: 1; filter: blur(0px); transform: translatex(0); transform: translatez(0); transform: translatey(0); } .show:nth-child(3) { opacity: 1; filter: blur(0px); transform: translatex(0); transform: translatez(0); transform: translatey(0); } } and the javascript for observing them is: const observer = new intersectionobserver((entries) = { entries.foreach((entry) = { console.log(entry) if (entry.isintersecting) { entry.target.classlist.add('show'); } else { entry.target.classlist.remove('show'); } }); }); const hiddenelements = document.queryselectorall( .hidden, .hidden2, .hidden3 ); hiddenelements.foreach((el) = observer.observe(el)); here is the html for one of the two groups of showcased images. the items that want to be hidden are given the class hidden . section class= showcase !--this is the title, above the images (not part of them).-- h1 what is it? /h1 p create is a minecraft mod created by simibubi that allows you to make contraptions, br and automate processes using rotational power/stress units. br it adds many mechanical components and block variants to the game. br create is also a decoration mod. it adds a total of 550+ blocks! 
/p div class= row !--this is for each image.-- div class= showcase-col hidden img src= images/opengates.png div class= layer !--the text at the bottom of the images.-- h3 the gears br behind it all /h3 /div /div div class= showcase-col hidden img src= images/industrial-1.png div class= layer h3 the industrial br aspects /h3 /div /div div class= showcase-col hidden img src= images/nature.png div class= layer h3 the nature /h3 /div /div /div /section i am aiming to have the transitions change to simply fade in from the background instead of from the sides, but i can not manage to override the properties. this is below the previous code btw. i have tried changing the position of this media query in the css, but that resulted in nothing. i have tried redefining everything in the media query so that it should override the previous code, but it hasn't. i was expecting to have the items fade in from the void (from nothing which would be transform: translatez(-100%); , but that does nothing. in summary, i am wondering if there are any flaws in my code, and also how to override statements of animations as they are not currently being overridden. the link to the temproary site is: [LINK]/ where the site can be seen and inspected in mobile view to see the problem.
2024-02-21 07:02:36.490000000
you need to use paste0() in your ifelse() . you can also use %in% instead of | for multiple 'or' matches. you can also add multiple statements to a single mutate() . judging by what you are adding to the issues column, i'm guessing you are wanting to identify problems so you can correct the respective columns? in the case of preferred name, it's possible to copy preferred name from firstname to preferredname. if this is the case, update your question to include what your end goal is and i will update my answer. if not, this solution will be fine. library(dplyr) df - data.frame( preferredname = , firstname = c( mary , harold (harry) , ben , tristian (tri) , julia ), county = c( , other , warren , butler , ), date = as.date( 2024-02-15 ), address = c( 123 street , 42 ave , , , 520 road ) ) dfcor - df % % mutate(issues = ifelse((preferredname == & grepl( \\( , firstname)), preferred name , ), issues = ifelse(county %in% c( usa , , other , n/a ) & date = as.date( 2023-08-01 ), paste0(issues, county ), issues), issues = ifelse((address == & date = as.date( 2023-08-01 )), paste0( address , issues), issues), issues = trimws(issues)) dfcor preferredname firstname county date address issues 1 mary 2024-02-15 123 street county 2 harold (harry) other 2024-02-15 42 ave preferred name county 3 ben warren 2024-02-15 address 4 tristian (tri) butler 2024-02-15 address preferred name 5 julia 2024-02-15 520 road county
2024-02-16 00:37:32.143000000
i am new to kubeflow and i am trying to access the dashboard, but no success. i am running kubernetes locally installed via minikube. my lasta attempt was running the following command: kubectl create -f [LINK] -n kubeflow but it gives the following error: kubectl create -f [LINK]> -n kubeflow error: error parsing [LINK]>: error converting yaml to json: yaml: line 206: mapping values are not allowed in this context any help will be much appreciated. tried many other things according to gemini
2024-02-29 20:13:03.820000000
i have 8k data in my server. i want to use it into my android apps. i want to store volley response into my app for offline use. when phone data connection will off it will work and when data connection is available it will try to collect data from api response. please explain and suggest code or related tutorial.
2024-03-14 12:11:35.550000000
i tried to install nginx on a php docker container dockerfile from php:8.2-fpm instala nginx y herramientas necesarias run apt-get update && apt-get install -y nginx wget instala xdebug run pecl install xdebug && docker-php-ext-enable xdebug configura nginx copy ./nginx/default.conf /etc/nginx/conf.d/default.conf inicia php-fpm y nginx cmd [ nginx , -g , daemon off; ] docker-compose.yml version: '3' services: php-fpm: build: . volumes: - ./src:/var/www/html ports: - 9000:9000 - 80:80 default.conf server { listen 80; servername localhost; location / { root /var/www/html; index index.php; } location ~ \.php$ { fastcgipass localhost:9000; fastcgiindex index.php; include fastcgiparams; fastcgiparam scriptfilename $documentroot$fastcgiscript_name; } } and when i try to acces i get a 502 error, this is the nginx error log 2024/02/27 17:50:18 [error] 11#11: *19 connect() failed (111: connection refused) while connecting to upstream, client: 172.22.0.1, server: localhost, request: get / http/1.1 , upstream: fastcgi://127.0.0.1:9000 , host: localhost ther is something wrong on the config? on the console i can execute php files but not making request using the browser
2024-02-27 17:52:16.873000000
i am new to unity and this website and im making a code for a arduino braccio++ where i can control the robot arm live from my laptop on unity. i found the model and alot of the code online but had to change it a bunch for my own use. when the arduino.write is called for the first time it seemingly works fine but after the delay and it is called again, the whole of unity stops and i cant do anything until i unplug the arduino. however the debug.log shows me it stops at the yeild waitforseconds but when i unplug it the debug.log up to the arduino.write show up. the reason i think its the arduino.write and not the yeild waitforseconds is because when i take out the arduino.write it all runs fine but when its in the above happened. i will put my arduino and unity code below. unity code: using unityengine; using system.collections; using system.io.ports; public class arduinoserial : monobehaviour { public rotation servo1; public rotation2 servo2; public rotation3 servo3; public rotation4 servo4; public rotation5 servo5; public float delayseconds = 1f; public string portname; serialport arduino; bool startcommands = false; bool ready = false; void start () { arduino = new serialport (portname, 9600); arduino.open (); } void update () { debug.log ( broken ); if (startcommands == false) debug.log ( broken3 ); startcoroutine (sendcommands ()); } ienumerator sendcommands () { debug.log ( broken4 ); startcommands = true; //yield return null; if (startcommands == true){ debug.log ( broken8 ); } //arduino.close(); //arduino.open(); debug.log ( broken7 ); yield return new waitforseconds(delayseconds); debug.log ( broken5 ); arduino.close(); arduino.open(); ready = arduino.isopen; if (ready == false){ try{ arduino.open(); } catch{ debug.log ( no serial ); } } debug.log ( broken1 ); if (ready == true) { debug.log ( broken2 ); string str; string thetabasestr = (mathf.roundtoint(servo1.eulerangy)).tostring( 000 ); string thetashoulderstr = 
(mathf.roundtoint(servo2.servo2angle)).tostring( 000 ); string thetaelbowstr = (mathf.roundtoint(servo3.servo3angle)).tostring( 000 ); string thetawristverticalstr = (mathf.roundtoint(servo4.servo4angle)).tostring( 000 ); string thetawristrotationstr = (mathf.roundtoint(servo5.servo5angle)).tostring( 000 ); str = thetabasestr + thetashoulderstr + thetaelbowstr + thetawristverticalstr + thetawristrotationstr + \n ; debug.log ( send serial: + str); try{ arduino.write (str); } catch{ debug.log ( transmission failed ); } debug.log ( broken6 ); } startcommands = false; ready = false; } } / simplemovements.ino this sketch simplemovements shows how they move each servo motor of braccio created on 18 nov 2015 by andrea martino this example is in the public domain. / #include braccio++.h int stepdelay = 20; int m1 = 90; int m2 = 45; int m3 = 180; int m4 = 180; int m5 = 90; int m6 = 10; char incomingbyte; int row = 0; int row1 = 0; bool flipped = false; char indata[25]; bool isread = false; int index1 = 0; int array1[20][6]; int buttonpin = 2; void setup() { //initialization functions and set up the initial position for braccio //all the servo motors will be positioned in the safety position: //base (m1):90 degrees //shoulder (m2): 45 degrees //elbow (m3): 180 degrees //wrist vertical (m4): 180 degrees //wrist rotation (m5): 90 degrees //gripper (m6): 10 degrees braccio.begin(); serial.begin(9600); pinmode(buttonpin, output); } void loop() { / step delay: a milliseconds delay between the movement of each servo. allowed values from 10 to 30 msec. m1=base degrees. allowed values from 0 to 180 degrees m2=shoulder degrees. allowed values from 15 to 165 degrees m3=elbow degrees. allowed values from 0 to 180 degrees m4=wrist vertical degrees. allowed values from 0 to 180 degrees m5=wrist rotation degrees. allowed values from 0 to 180 degrees m6=gripper degrees. allowed values from 10 to 73 degrees. 10: the toungue is open, 73: the gripper is closed. 
/ delay(10); readserialstr(); if (braccio.isbuttonpressedenter() == true){ braccio.moveto(m1, m2, m3, m4, m5, m6); row1 += 1; delay(2000); } delay(10); } void readserialstr() { flipped = false; digitalwrite(buttonpin, high); if (serial.available() 0) { char incomingbyte = serial.read(); if (flipped = false){ while (incomingbyte != '\n' && isdigit(incomingbyte)) { isread = true; delay(10); indata[index1] = incomingbyte; index1 += 1; incomingbyte = serial.read(); digitalwrite(buttonpin, low); } indata[index1] = '\0'; } serial.end(); serial.begin(9600); } if (isread) { char m1char[4]; char m2char[4]; char m3char[4]; char m4char[4]; char m5char[4]; char m6char[4]; m1char[0] = indata[0]; m1char[1] = indata[1]; m1char[2] = indata[2]; m1char[3] = '\0'; m2char[0] = indata[3]; m2char[1] = indata[4]; m2char[2] = indata[5]; m2char[3] = '\0'; m3char[0] = indata[6]; m3char[1] = indata[7]; m3char[2] = indata[8]; m3char[3] = '\0'; m4char[0] = indata[9]; m4char[1] = indata[10]; m4char[2] = indata[11]; m4char[3] = '\0'; m5char[0] = indata[12]; m5char[1] = indata[13]; m5char[2] = indata[14]; m5char[3] = '\0'; m6char[0] = indata[15]; m6char[1] = indata[16]; m6char[2] = indata[17]; m6char[3] = '\0'; m1 = atoi(m1char); m2 = atoi(m2char); m3 = atoi(m3char); m4 = atoi(m4char); m5 = atoi(m5char); m6 = atoi(m6_char); isread = false; index1 = 0; } }
2024-03-07 17:02:55.697000000
i ran into the same problem, and found this issue . as explained in the comment: redirection and mirroring only happens when pulling, not when pushing or logging in. it's an expected behaviour. when you push the image, it should be tagged as localhost:5000/hello-world. then you should be able to pull it as example.com/hello-world.
2024-03-26 09:58:53.923000000
i am trying to upgrade my spring boot from 2.7.x to 3.2.x. but the following code is not compiling. [USER].persistence.entity public class order { ... [USER](type = org.jadira.usertype.dateandtime.joda.persistentdatetime )//can't resolve method type private datetime createdon; } is joda datetime supported by hibernate 6.4.x? if yes, how to migrate the above code?
2024-03-06 12:38:37.593000000
i want to add a srt to a mp4 with matching names from same folder. i want to script it to keep doing this to all files in folder. this works if i do 1 at a time , but i have over 1,000 to do. how can i create a batch to do this ffmpeg -i source .mp4 -i source .srt -c copy -c:s movtext -metadata:s:s:0 language=eng destination .mp4 stream #0:0(und): video: h264 (high) (avc1 / 0x31637661), yuv420p(tv, bt709, progressive), 1280x694 [sar 1:1 dar 640:347], q=2-31, 1385 kb/s, 23.98 fps, 23.98 tbr, 24k tbn (default) metadata: handlername : videohandler vendorid : [0][0][0][0] stream #0:1(und): audio: aac (lc) (mp4a / 0x6134706d), 44100 hz, stereo, fltp, 192 kb/s (default) metadata: handlername : soundhandler vendorid : [0][0][0][0] stream #0:2(eng): subtitle: movtext (tx3g / 0x67337874) metadata: encoder : lavc60.40.100 mov_text [out#0/mp4 @ [HASH]] video:1027151kib audio:142295kib subtitle:31kib other streams:0kib global headers:0kib muxing overhead: 0.297458% size= 1172956kib time=01:35:42.13 bitrate=1673.4kbits/s speed= 763x
2024-02-26 14:51:24.930000000
easier way to do this: snippet: def removevowels(word: none) - str: letters = [char for char in word if char.lower() not in ''aaeeiioouuyy''] return ''.join(letters) wordwithwholealphabet = input( give me a string bro: ) print(removevowels(wordwithwholealphabet)) output: give me a string bro: lalalilalou lllll
2024-02-24 21:01:16.787000000
in my computer science class, i am assigned to code a mod for a basic snake game program. i decided to add a feature where when the user presses the space bar, it will shoot out a portal depending on what mode the user is in (similar to the game portal). to start with the basics, i decided to start with the shoot method, and wanted it to be animated. i managed to get the blue/enter portal working, but when i tried to implement the same methods to the orange/exit portal, it just automatically goes to the top of the screen. i have tried to change the variables from local to global, using extra functions instead of just doing it in one, banging my head against the wall over and over again but nothing is working. what am i doing wrong? code dictionary for portals enterp = 0 exitp = 1 portalpos = [ the first dict is the enter portal, and the second dict is the exit portal { x : random.randint(0, 31), y : random.randint(0, 23), dir : }, { x : random.randint(0, 31), y : random.randint(0, 23), dir : } ] key checks for event in pygame.event.get(): if event.type == quit: terminate() elif event.type == keydown: if (event.key == kleft or event.key == ka) and direction != right: direction = left elif (event.key == kright or event.key == kd) and direction != left: direction = right elif (event.key == kup or event.key == kw) and direction != down: direction = up elif (event.key == kdown or event.key == ks) and direction != up: direction = down elif event.key == kp: print(exitportanicount) elif event.key == kspace: if portalgunmode == ent : portalpos[enterp][ dir ] = direction portalpos[enterp]['x'] = wormcoords[head]['x'] portalpos[enterp]['y'] = wormcoords[head]['y'] enterportanicount = prange if portalgunmode == ext : portalpos[exitp][ dir ] = direction portalpos[exitp]['x'] = wormcoords[head]['x'] portalpos[exitp]['y'] = wormcoords[head]['y'] exitportanicount = prange elif event.key == k_escape: terminate() portal bound checks def checkportalbounds(): if portalpos[enterp]['x'] 
= 31: portalpos[enterp]['x'] = 31 elif portalpos[enterp]['x'] = 0: portalpos[enterp]['x'] = 0 elif portalpos[enterp]['y'] = 23: portalpos[enterp]['y'] = 23 elif portalpos[enterp]['y'] = 0: portalpos[enterp]['y'] = 0 if portalpos[exitp]['x'] = 31: portalpos[exitp]['x'] = 31 elif portalpos[exitp]['x'] = 0: portalpos[exitp]['x'] = 0 elif portalpos[exitp]['y'] = 23: portalpos[exitp]['y'] = 23 elif portalpos[exitp]['y'] = 0: portalpos[exitp]['y'] = 0 update portal animations def updateportalanims(): global enterportanicount, exitportanicount for i in range(prange): if not enterportanicount = 0 and not portalpos[enterp]['x'] == 31 and not \ portalpos[enterp]['x'] == 0 and not portalpos[enterp]['y'] == 23 and not \ portalpos[enterp]['y'] == 0: if portalpos[enterp]['dir'] == right: portalpos[enterp]['x'] += shootspeed enterportanicount -= 1 checkportalbounds() elif portalpos[enterp]['dir'] == left: portalpos[enterp]['x'] -= shootspeed enterportanicount -= 1 checkportalbounds() elif portalpos[enterp]['dir'] == up: portalpos[enterp]['y'] -= shootspeed enterportanicount -= 1 checkportalbounds() elif portalpos[enterp]['dir'] == down: portalpos[enterp]['y'] += shootspeed enterportanicount -= 1 checkportalbounds() if not exitportanicount = 0 and not portalpos[exitp]['x'] == 31 and not \ portalpos[exitp]['x'] == 0 and not portalpos[exitp]['y'] == 23 and not \ portalpos[exitp]['y'] == 0: if portalpos[exitp]['dir'] == right: portalpos[exitp]['x'] += 1 exitportanicount -= 1 checkportalbounds() elif portalpos[exitp]['dir'] == left: portalpos[exitp]['x'] -= 1 exitportanicount -= 1 checkportalbounds() elif portalpos[exitp]['dir'] == up: portalpos[exitp]['y'] -= 1 exitportanicount -= 1 checkportalbounds() elif portalpos[exitp]['dir'] == down: portalpos[exitp]['y'] += 1 exitportanicount -= 1 checkportalbounds() else: break
2024-03-01 15:43:31.103000000
what i ended up doing, was editing the reentrantlock.java file. i added a native function in runtime.java which then called the vm and handled the lock/unlock operation. inside src/java.base/share/native/libjava/runtime.c , i added the following function: jniexport void jnicall javajavalangruntimehandlelock(jnienv *env, jobject this) { jvmhandlelock(env); } then inside src/java.base/share/classes/java/lang/runtime.java : public native void handlelock(); finally, inside src/hotspot/share/prims/jvm.cpp : jvmentry(void, jvm_handlelock(jnienv* env)) if (thread && thread- isjavathread()) { javathread jt = (javathread ) thread; frame fr = jt- lastframe(); // to get the actual sender frame, we need to skip runtime and java.util.concurrent frames registermap map(jt, registermap::updatemap::skip, registermap::processframes::skip, registermap::walkcontinuation::skip); fr = fr.sender(&map); // is there a better way? fr = fr.sender(&map); if (fr.isinterpreted_frame()) { method *m = fr.interpreterframemethod(); address bcp = fr.interpreterframebcp(); ... } } jvmend i added the functions inside, make/data/hotspot-symbols/symbols-unix : jvmhandlelock then to actually handle the lock/unlock functions, i only needed to add a runtime call inside the corresponding function. e.g for src/java.base/share/classes/java/util/concurrent/locks/reentrantlock.java : public void lock() { sync.lock(); runtime.getruntime().handlelock(); }
2024-03-06 14:17:22.610000000
i'm trying to develop a chrome extension with react and wxt (web extension framework) but the project getting big and out of control, so i need react-devtools to point to me what the actually happening, keep checking variables and things! i've tried many different ways if you search for the answer but none of them work maybe too old (2 years) or not even related. i added a script tag with [LINK]> to the content script under the shadow root install react-devtools standalone app. i must install the csp unblock extension to use it in mv3. otherwise, you know csp error. run the extension and it just keeps flickering back and forth. or go to [LINK]> save the file and put it into the extension folder. it just shows: loading react element tree... if this seems stuck, please follow the troubleshooting instructions i've made it work once but i can't remember or backup the change because i was changed to vue that time
2024-02-29 05:23:02.893000000
probably something like this: pathlib.path(path).parent if not os.path.isdir(path) else pathlib.path(path)
2024-02-27 09:10:50.527000000
given the dataset below... col1 col2 var1 var2 var3 1 1 na na na 2 2 na na na 3 3 na 3 na 4 4 4 4 4 5 5 5 5 5 6 6 6 na 6 7 7 na 7 7 8 8 na na na 9 9 9 na na 10 10 na na na ...how can i remove rows containing nas for all select columns only at the start and end in an automated way (and preferably using tidyverse)? col1 col2 var1 var2 var3 3 3 na 3 na 4 4 4 4 4 5 5 5 5 5 6 6 6 na 6 7 7 na 7 7 8 8 na na na 9 9 9 na na so in the example above, how can i remove those rows containing nas for all of columns var1-var3 only at the start and end of the dataset? that is, only rows 1, 2 and 10 should disappear. (although row 8 contains nas for all of these select variables, it should not be removed as it is not at the 'start' or 'end' of the dataset.) here is my tidyverse solution: library(tidyverse) have - tibble(col1 = 1:10, col2 = 1:10, var1 = 1:10, var2 = 1:10, var3 = 1:10) have[c(1, 2, 10), 3:5] - na have[3, c(3, 5)] - na have[6, 4] - na have[7, 3] - na have[8, 3:5] - na have[9, 4:5] - na noselectvars - 3 the number of select variables want - have | mutate(nomissing = rowsums(across(-c(col1, col2), ~ is.na(.x)))) | slice(first(which(nomissing noselectvars)):n()) | slice(1:last(which(nomissing noselectvars))) | select(-nomissing) is there an existing function for this purpose, or a more elegant solution than mine?
2024-02-25 01:39:54.950000000
simpler solution to your request i have an element in alpinejs i want to get values of the data attributes when i click on that link. $el.dataset.url script src="//unpkg.com/alpinejs" defer /script div x-data="app" span click the button /span div button [USER].prevent="console.info($el.dataset); console.info('url: ' + $el.dataset.url);" data-video="videoid" data-url="tools/downloadthumbnail" data-quality="maxres" max res /button /div /div script document.addeventlistener('alpine:init', () = { alpine.data('app', () = ({})) }) /script
2024-03-15 10:21:14.733000000
i've set up a free cosmos db for mongodb vcore database without any issues, i've then been able to utilise the provided connection string to connect to the database with my credentials on both mongodbcompass and also azure data studio which confirms the string is working and allows access to the database in question. however, when i have then used the given string in my .net 8 capplication with mongodb.driver with the correct username and password inserted username : password i get the following exception every time it tries to set up the client: mongodb.driver.mongoconfigurationexception: the connection string 'mongodb+srv:// hidden [USER]-test.mongocluster.cosmos.azure.com/?tls=true&authmechanism=scram-sha-256&retrywrites=false&maxidletimems=120000' is not valid. the code to populate the client is: public networkservice(ioptions mongodbsettings mongodbsettings) { _client = new mongoclient(mongodbsettings.value.connectionlocal); } i've double checked the mongodbsettings.value.connectionlocal property and this is populated with the correct string given by azure cosmos db so i'm uncertain as to why i'm getting this error saying the string is not valid. this code also executes perfectly fine and apis all work as expected when i point connectionlocal string at my local mongodb database: connectionlocal : mongodb://localhost:27017/ ,
2024-02-29 21:23:09.167000000
i am trying to use the getlocaladdresslist() as referenced in [LINK]> i have translated the delphi code to c++ #include idstack.hpp tidstacklocaladdresslist llist; tidstacklocaladdress laddr; int idx; try { llist = new tidstacklocaladdresslist(); try { gstack- getlocaladdresslist(llist); for (idx = 0; idx llist- count - 1; idx++) { laddr = llist- addresses[idx]; switch (laddr- ipversion) { case idipv4: // add to list items listbox1- items- add(laddr- ipaddress); break; case idipv6: break; default:; } } } catch (...) { } } __finally { delete llist; } i get project project1.exe raised exception class 0xc0000005 with message 'exception 0xc0000005 encountered at address 0x408b38: access violation reading location 0x00000000'. when i execute gstack- getlocaladdresslist(llist); i assume that it is because i haven't made a gstack object, but i have no reference as to how/when to do that. i have looked on the web an can't seem to find any good references for using indy with builder :( can someone help out? thanks is advance.
2024-02-15 19:00:10.127000000
the partial keyword in cis a feature that allows to split the definition of a class, struct, interface, or method over two or more source files1. this can be particularly useful. when working on large projects, spreading a class over separate files enables multiple programmers to work on it at the same time. code can be added to the class without having to recreate the source file. when using source generators to generate additional functionality in a class the partial keyword can make code more organized and manageable, especially when working with large classes or when want to separate auto-generated code from own code.
2024-02-23 22:28:43.720000000
in my case i had the same probleme when reading the zip using ibm437. characters ¤øø§ were replace incorretly. using the charset ibm850 fixed the issue
2024-03-22 16:28:22.163000000
i'm getting the error on this line requestbody body = requestbody.create(json, json); i saw several post about add it manually, so i did it, i include the jar as part of the build path without any change. what would be the correct way to add it on eclipse? i'm using okhttp-4.12.0 and okio-3.7.0 i copied the code from the official web, i also added to build path the okio-3.7.0.jar but it looks like is not detecting or not relating the libraries. this is the code i'm using: mediatype json = mediatype.get( application/json ); okhttpclient client = new okhttpclient(); requestbody body = requestbody.create(json, json); request request = new request.builder().url(hosturl + url).post(body).build(); try (response response = client.newcall(request).execute()) { return response.body().string(); }
2024-02-07 22:02:16.223000000
this is the error i got httperrorresponse {headers: httpheaders, status: 200, statustext: 'ok', url: '[LINK]', ok: false, …} error : {error: syntaxerror: unexpected token ' ', !doctype ... is not valid json at json.parse ( anonymous …, text: ' !doctype html \r\n html lang= en \r\n head \n scrip…ain.js type= module \x3c/script /body \r\n /html \r\n'} headers : httpheaders {normalizednames: map(0), lazyupdate: null, lazyinit: ƒ} message : http failure during parsing for [LINK] name : httperrorresponse ok : false status : 200 statustext : ok url : [LINK] [[prototype]] : httpresponsebase i have my service where i make my .get [USER]() export class heroesservice{ urlheroes: string = [LINK] ; getinfoid(id:string): observable searchheroesresponse { return this.http.get searchheroesresponse (id) } constructor(private http:httpclient){} } and in my component where i subscribe the response [USER]({ selector: 'app-heroe', templateurl: './heroe.component.html', }) export class heroecomponent { ngoninit(){ this.activatedroute.params.pipe(switchmap(({id})= this.http.getinfoid(id))) .subscribe(info = console.log(info)); } constructor(private http: heroesservice, private activatedroute:activatedroute){} } i'm using a json server, where i have a localhost with this info (clipped information) { usuarios : [ { id : 1, usuario : john doe , email : john.doe[USER].com } ], heroes : [ { id : dc-batman , superhero : batman , publisher : dc comics , alterego : bruce wayne , firstappearance : detective comics #27 , characters : bruce wayne }, { id : dc-flash , superhero : flash , publisher : dc comics , alterego : jay garrick , firstappearance : flash comics #1 , characters : jay garrick, barry allen, wally west, bart allen } ] } i don't know what to do, i have searched on internet and they say something about html and json compatibility, but i'm new at angular and i'm following a course of it, so please be understandable. 
if you want more information about what i wanna do, i click a button from each heroe that sends the id to the url with a routerlink: button mat-button color= warn [routerlink]= ['/heroes', heroe.id] read more /button i do get the id in url, indeed it shows the heroe name in console when executing this code: ngoninit(){ this.activatedroute.params.subscribe( ({id}) = console.log(id)); }
2024-02-19 03:48:36.387000000
i'm trying to use the workday rest api to fetch & update some workers data. i'm having some difficulties to authenticate to it and the documentation is very poor on information to exploit the api. i've followed this link , which give guidelines to create oauth app inside workday which is a pretty common thing for api authentication. as i already have an isu working, i've created an api client for integrations giving me an client id & secret. the issue is that i manage to get an accesstoken using these information. usually this curl request should work to get my token : curl --location '{oauthurl}/token' \ --header 'content-type: application/x-www-form-urlencoded' \ --header 'authorization: basic {base64(clientid:clientsecret)}' \ --data-urlencode 'granttype=clientcredentials' but this returns : { error : invalid request } does anyone have any idea of to achieve this ? thank you ! edit : i've found a way to generate an accesstoken using api client for integrations : curl --location --request post '[LINK]' \ --header 'content-type: application/x-www-form-urlencoded' \ --data-urlencode 'granttype=refreshtoken' \ --data-urlencode 'clientid=xxx' \ --data-urlencode 'clientsecret=xxx' \ --data-urlencode 'refreshtoken=xxx' but i cannot find a way to interact with workday such as person api . i'm always getting an error when trying to get /people
2024-03-22 14:49:35.060000000
universal links don't work for certain apps (like facebook, etc..). for such cases, you should consider a webpage as a fallback.. meaning, that when opening the universal link, show a basic page with a button that will lead the user to the app (via urlscheme) or to appstore (if the urlscheme opening fails).
2024-03-12 09:53:46.040000000
your table definitions are a bit off but there is nothing about the delete on its own that won't work: create table users (id int primary key); create table groups (id int primary key); create table pages (id int primary key); create table grouproles ( id int primary key, groupid int references groups (id)); create table usermemberships ( userid int references users(id), grouproleid int references grouproles(id) ); insert into users values (1),(2); insert into groups values (1),(2); insert into grouproles values (1,1),(2,2); insert into usermemberships values (1,1),(2,2); that delete is perfectly valid, pretty much exactly how you typed it: demo at db fiddle delete from usermemberships where userid = 1 and grouproleid in ( select id from grouproles where groupid = 1 ); it can work fine with the whole array in it, too: delete from usermemberships using generateseries(1,arraylength(array[[1,1],[2,2]],1))as a(n) where userid = (array[[1,1],[2,2]])[n][1] and grouproleid in ( select id from grouproles where group_id = (array[[1,1],[2,2]])[n][2] ) returning *;
2024-02-27 07:07:20.393000000
so i had this same problem today. i had a mix of non-field errors and field errors that i wanted to all occur in the right place. and i had trouble finding all the relevant information in the django docs in one place, but i eventually routed around enough to piece everything together. (incidentally, there may be more than one way to do this, and note that i'm working with django 4.2). i didn't dig to fully understand the reasoning behind all of this, but my vague understanding is that fields have to be removed from the cleaned data (as processed by super().clean() . calling self.adderror(field, errorlist) will automatically remove a field from the cleaneddata . and to add a non-field error, you just supply the field argument as none . here's an example: class myform(form): f1 = charfield() f2 = charfield() def clean(self): super().clean() if bad in self.cleaneddata[ f1 ]: self.adderror( f1 , validationerror( invalid f1 )) if bad in self.cleaneddata[ f2 ]: self.adderror( f2 , validationerror( invalid f1 )) if not (self.cleaneddata[ f1 ] and self.cleaneddata[ f2 ]): self.adderror(none, validationerror( either f1 or f2 must be supplied )) to make them display in the desired places in your template, use {{ form.f1.errors }} , {{ form.f2.errors }} , and {{ form.nonfielderrors }} where you want them to go.
2024-03-23 18:26:52.413000000
i am trying to use a widget that i have created in reactjs using script tags like below- react implementation import react from react ; import { helmet } from react-helmet ; const dust = () = { return ( div helmet link href= [LINK] rel= stylesheet / script async src= [LINK] /script /helmet /div ); }; export default dust; for react js the above is working fine and i can see the content on my screen. nextjs implementation import image from next/image ; import { inter } from next/font/google ; import script from next/script ; import head from next/head ; const inter = inter({ subsets: [ latin ] }); export default function home() { return ( main classname={flex min-h-screen flex-col items-center justify-between p-24 ${inter.classname}} head link href= [LINK] rel= stylesheet / /head script src= [LINK] strategy= lazyonload / /main ); } the above is not working in next js, however i can see the js and css loading in network tab. what might be the solution in nextjs?
2024-03-10 10:14:54.637000000
the form of a digital image that is captured by a digital camera or can be displayed on a screen or printed on paper is a raster (regular 2d array) of digitized pixel colors. this is sometimes called a bitmap . so, does the file imagefile.ppn lose quality compared to jpegfile.jpg? it's a bit complicated, but the simple answer is no, not if the image converter does its job correctly. there is a quality factor associated with jpeg because it does not represent bitmaps directly. it employs a lossy representation that reduces the space required to represent images. that algorithm has an adjustable tradeoff between image size and lossiness that is characterized by what has come to be known as a quality factor: lower quality means smaller and lossier. that effect is incurred when the original image, whether on the image sensor of a digital camera, in computer memory, or in a file, is converted to jpeg format. it is then baked into that jpeg -- it represents a slightly different image than the original, because some information has been lost. to display a jpeg on a screen or convert it to a different format or otherwise edit it, it needs to be converted back to a bitmap. the result will not exactly match the original, but if the reconstruction is done correctly then a given jpeg should yield the same bitmap every time. there need not be any additional data loss at this step. ppm is different. it represents images directly in the form of bitmaps. ppm's bitmaps have three color channels supporting up to 16-bit depth each, which is more than your monitor does, very likely more than the images you are working with do, and more than your eyes can resolve. it can directly represent the bitmap produced by decompressing a jpeg without any loss of detail. it gets more complicated, though, when you bring color spaces, non-linearity, and similar considerations into it. 
these are not what jpeg's quality factor is about, but they do constitute another image-representation dimension that makes room for a ppm (or other image file) to be interpreted differently than the original image. jpeg's native color space is ycbcr, whereas ppm's is a calibrated, gamma-corrected rgb color space. conversion is necessary from one to the other, and this can introduce slight color distortions, especially with narrow color channels. note also that there are other image formats available. many employ one or another form of lossless compression to reduce images' storage sizes, which can be a distinct advantage over ppm. gif was at one time a major format, but it has somewhat fallen out of favor. png seems to be winning the bitmap wars at the moment. it is a more complex format than ppm, but very good, with built-in support for lossless compression.
2024-03-13 22:56:16.367000000
since chrome self-update i frequently getting this error message: unknown error: cannot connect to chrome at localhost:9222 from unknown error: devtools returned unknown type:sharedstorageworklet i search and could not found the specific sharedstorageworklet error. can someone help me? i didnt found any solution on web for this particular error, its a error with the latest version of chrome.
2024-03-01 14:06:18.663000000
the answer here is to use kubectl create token as suggested in kubectl create token sa -n namespace --audience sts.amazonaws.com note that no service account linked secret is required.
2024-03-14 16:14:49.450000000
i use webtestclient writing test cases in spring boot application. i need to get response body by compressing it. when i use resttemplate i can get compressed response body. httpheaders httpheaders = new httpheaders(); httpheaders.add( accept-encoding , gzip ); httpheaders.add( authorization , bearer + token);` httpentity requestdto requestentity = new httpentity (requestdto, httpheaders); esponseentity byte[] responseentity = resttemplate.exchange(baseurl + /test ,httpmethod.post, requestentity, byte[].class); when i use webtestclient, response body not compressed. is there any way to compressed response body when use webtestclient. byte[] response = webtestclient.post().uri( /test ) .header( authorization , bearer + token) .header(httpheaders.accept_encoding, gzip ) .body(bodyinserters.fromvalue(requestdto)).exchange() .expectstatus().isok() .expectbody(byte[].class) .returnresult().getresponsebody();
2024-03-20 11:05:02.490000000
i'd recommend using a profiler such as pyinstrument to find the slowest parts of your code. i've added the required lines to your code to use pyinstrument (see below). start with just a few of your data files. after you find and fix the main bottlenecks, try running your code with more data files. import os import numpy as np import xarray as xr from pyinstrument import profiler profiler = profiler() profiler.start() datafolder1 = 'k:\\reanalysis\era5\\100mspeed\\100m u\\1979-2022' datafolder2 = 'k:\\reanalysis\era5\\100mspeed\\100m v\\1979-2022' filenamesu = os.listdir(datafolder1) filenamesv = os.listdir(datafolder2) def readwind(filepathu, filepathv, latvalue, lonvalue): with xr.opendataset(filepathu) as ds: uwind = ds['u100'].sel(latitude=latvalue, longitude=lonvalue).values with xr.opendataset(filepathv) as ds: vwind = ds['v100'].sel(latitude=latvalue, longitude=lonvalue).values return uwind, vwind latvalue = 0 lonvalue = 90 alluwinds = [] allvwinds = [] for filenameu, filenamev in zip(filenamesu, filenamesv): filepathu = os.path.join(datafolder1, filenameu) filepathv = os.path.join(datafolder2, filenamev) uwind, vwind = readwind(filepathu, filepathv, latvalue, lonvalue) alluwinds.extend(uwind) allvwinds.extend(vwind) alluwinds = np.array(alluwinds) allvwinds = np.array(allvwinds) allwindspeeds = np.sqrt(np.square(alluwinds) + np.square(allvwinds)) percentile50 = np.percentile(allwindspeeds, 50) print(f the 50% percentile of the wind speed values is: {percentile50} ) profiler.stop() profiler.print()
2024-02-19 17:38:05.363000000
you did not specify what you wanted returned in the third column if the second column contained neither agree nor disagree. i returned a null, but you can easily change that. the algorithm consistis of adding a shifted column so that we are using a replacement on the same row, instead of using an index column. let source = excel.currentworkbook(){[name= table28 ]}[content], changed type = table.transformcolumntypes(source,{ { question category , type text}, { answer category , type text}}), trimmed text = table.transformcolumns(changed type ,{{ answer category , text.trim, type text}}), add shifted answer = table.fromcolumns( table.tocolumns(trimmed text ) & {list.removefirstn(trimmed text [answer category],1) & {null}}, type table[question category=text, answer category=text, shifted answer=text]), //note use of text.lower as m-code is case sensitive add result = table.addcolumn(add shifted answer , result , each if text.lower([answer category])= agree or text.lower([answer category])= strongly agree then null else if text.lower([answer category])= disagree or text.lower([answer category])= strongly disagree then [shifted answer] else null, type nullable text), removed columns = table.removecolumns(add result ,{ shifted answer }) in removed columns original data results
2024-03-14 22:40:34.327000000
i want smth like select jsonobject( all ) from refqueststate is it possible? i've tried jsonobject(*), jsonobject(), jsonobject(all) and it doesn't work for example create table refqueststate ( id integer primary key autoincrement not null, name text not null unique ); and i want to display result as json, but i donn't want to write smth like select jsonobject( 'id', id, 'name', name ) from refquest_state to get result as { id :1, name : in progress } { id :2, name : failed } { id :3, name : succeed }
2024-02-22 09:12:15.670000000
in javascript (including es6), there isn't a concept of upcasting or downcasting like in statically-typed languages such as java or c++. however, you can achieve a similar effect by treating objects of a subclass as if they were objects of their superclass. this is because javascript objects are based on prototypes, and you can always access properties and methods of the superclass from an instance of a subclass. here's an example: class animal { constructor(name) { this.name = name; } speak() { console.log(${this.name} makes a sound.); } } class dog extends animal { constructor(name, breed) { super(name); this.breed = breed; } speak() { console.log(${this.name} barks.); } fetch() { console.log(${this.name} fetches.); } } // creating an instance of the subclass const mydog = new dog('buddy', 'golden retriever'); // upcasting to the superclass const myanimal = mydog; // no explicit casting needed, just assign the subclass instance to a superclass reference // accessing superclass methods and properties myanimal.speak(); // outputs: buddy barks. console.log(myanimal.name); // outputs: buddy // you cannot access subclass-specific properties or methods // myanimal.fetch(); // this would throw an error since fetch is not a method of the superclass in this example, dog is a subclass of animal. when you assign an instance of dog to a variable of type animal, you're effectively treating it as an animal. however, you won't be able to access subclass-specific methods or properties using the superclass reference.
2024-03-17 07:34:05.553000000
operator skips leading whitespace, so it will never give you an empty string if the read is successful (which you are not checking for). you can break the stream with ctrl-c/z or ctrl-break (depending on platform) to put cin into an error state that makes statements like if (cin input) evaluate as false.
2024-02-29 19:52:00.930000000
in my android app i have a sql database. the user adds some data in app and he is able to backup the db to google drive. this works fine, i am doing it like this example: string path = getapplicationcontext().getdatabasepath( mydbname.db ).getabsolutepath(); file filepath = new file( path); driveservicehelper.createfilepdf(filepath).addonsuccesslistener(...) and in driveservicehelper.createfilepdf (part of code): com.google.api.services.drive.model.file filemetadata = new com.google.api.services.drive.model.file(); filemetadata.setname( mydbname.db ); filecontent mediacontent = new filecontent( application/vnd.sqlite3 , filepath); myfile = mdriveservice.files().create(filemetadata, mediacontent).execute(); the problem is, on google drive it is stored as db file, easily read by ani sql viewer. however i don't want anyone to stole the data from db, because it contains info that i have collected for years of experience (gardening). is there a way to store sql db file in encrypted format or is there a way to upload only one table from that database? actually i have 2 tables in db, one with data and second just with user data, which is related to table 1, but contains only id's from first table, not data. so i see as a better idea to backup only 2nd table with id's and not whole db.
2024-03-17 21:33:05.710000000
i solve doing this ... allhref = driver.findelements(by.xpath, //*[contains([USER], 'h2h-stats')] ) for ia in range(len(allhref)): allhref[ia].click() #sleep #do what you need to do ... #go back allhref = driver.findelements(by.xpath, //*[contains([USER], 'h2h-stats')] )
2024-02-26 11:31:29.583000000
i believe the issue is that inter/dynamictinybert was fine-tuned for abstractive qa. it means that it will only support `task= question-answering . please see the docs below. question answering question answering is another token-level task that returns an answer to a question, sometimes with context (open-domain) and other times without context (closed-domain). this task happens whenever we ask a virtual assistant something like whether a restaurant is open. it can also provide customer or technical support and help search engines retrieve the relevant information you"re asking for. there are two common types of question answering: extractive: given a question and some context, the answer is a span of text from the context the model must extract. abstractive: given a question and some context, the answer is generated from the context; this approach is handled by the text2textgenerationpipeline instead of the questionansweringpipeline shown below from transformers import pipeline questionanswerer = pipeline(task= question-answering ) preds = question_answerer( question= what is the name of the repository? , context= the name of the repository is huggingface/transformers , ) print( f score: {round(preds['score'], 4)}, start: {preds['start']}, end: {preds['end']}, answer: {preds['answer']} ) score: 0.9327, start: 30, end: 54, answer: huggingface/transformers
2024-02-13 17:17:10.037000000
i'm working on a project using laravel 10 with mysql and i have these tables: users ===== id | username | email --------------------- culinaries ========== id | title | coverphoto | status | type | userid -------------------------------------------------- paradises ========= id | title | coverphoto | status | type | userid -------------------------------------------------- notifications ============= id | type | postid | posttype ------------------------------- * type: enum('usercreatedculinary', 'usercreatedparadise', 'adminapprovedculinary', 'adminrejectedculinary', 'adminapprovedparadise', 'adminrejectedparadise', 'othernotiftypes') * post_type : string (only contains either 'culinary' or 'paradise' for now) * postid : unsignedbiginteger as foreign key to either 'culinaries' or 'paradises' tables based on the 'posttype'. i want to read the data from notifications table using this code in my api controller file: $culinarysubquery = culinary::select('id', 'title', 'coverphoto', db::raw(' culinary as modulename')) - where('status', 'published') - where('type', 'public'); $paradisesubquery = paradise::select('id', 'title', 'coverphoto', db::raw(' paradise as modulename')) - where('status', 'published') - where('type', 'public'); $subquery = $culinarysubquery- unionall($paradisesubquery); $notifications = notification::select('n.id', 'n.userid', 'users.username', 'n.posttype', 'n.postid') - leftjoin('users', 'users.id', '=', 'n.userid') - leftjoinsub($subquery, 'tbl', function ($join) { $join- on('tbl.id', '=', 'n.postid') - wherecolumn('tbl.modulename', '=', 'n.posttype'); }) - orderby('n.userid') - orderby('n.id') - get(); dd($notifications); but i'm getting the error message below when i try to run it on postman: message : method illuminate\\http\\jsonresponse::first does not exist. , exception : badmethodcallexception , what am i doing wrong? what is the solution? any help is appreciated.
2024-03-22 05:28:03.250000000
you can use the cli commands instead. open a terminal window, navigate to the directory containing your .csproj file and execute the commands from there. here's an example using the vs code terminal: [LINK]>
2024-03-19 12:13:05.763000000
i am returning text/xml responses in my spring boot application. however, there are two key issues that cannot be recoverable on the client side (since we are not authorized to make any change on the client): 1 - the first issue is: my responses do not include ?xml version= 1.0 encoding= utf-8 ? . how can i add this ? 2 - the second one is: content-length header is missing in my responses. how can i include this ? thank you for your valuable ideas.
2024-02-16 07:26:59.093000000
i've got a directory with multiple files: - file1.json - file2.txt - file3.json and i'm running a command that creates an output.txt in the same folder. and i want to create two artifacts, one with a zip with the files and another one with the output.txt . if i do something like this: artifacts: paths: - ${downloads_dir}/* - ${downloads_dir}/output.txt would it work? thank you all in advance!
2024-03-05 10:09:53.230000000
python can make reading an consistent number of lines per logical grouping quite easy. start by reading the whole file line-by-line, taking care to strip away the trailing linebreak; you can also replace the extraneous commas: with open( input.txt ) as f: lines = [x.strip() for x in f.readlines()] lines = [x.replace( , , ) for x in lines] lines = [x[:-1] for x in lines] to remove the trailing comma, to preserve commas inside the string print(lines) and lines now looks like: [ first name , address , city state zip , second name , second address , second city state zip , ] you can now do a simple assertion to make sure you have groups of three lines: assert len(lines) % 3 == 0, f len(lines)={len(lines)}; expected a multiple of 3 then create a loop that increments an index three-at-a-time to turn each chunk of three lines into three fields in a (csv) row: rows: list[list[str]] = [] for i in range(0, len(lines), 3): rows.append(lines[i : i + 3]) print(rows) [ [ first name , address , city state zip ], [ second name , second address , second city state zip ], ] finally, use the csv module to write those rows to a new csv file: import csv with open( output.csv , w , newline= ) as f: writer = csv.writer(f) writer.writerows(rows) all together, without the print statements: import csv with open( input.txt ) as f: lines = [x.strip() for x in f.readlines()] lines = [x.replace( , , ) for x in lines] assert len(lines) % 3 == 0, f len(lines)={len(lines)}; expected an even multiple of 3 rows: list[list[str]] = [] for i in range(0, len(lines), 3): rows.append(lines[i : i + 3]) with open( output.csv , w , newline= ) as f: writer = csv.writer(f) writer.writerows(rows) following lines = [x.replace( , , ) for x in text.splitlines()] assert len(lines) % 3 == 0, f len(lines)={len(lines)}; expected an even multiple of 3 ...
2024-02-12 02:30:07.987000000
i am upgrading my tomcat from version 7 to tomcat 8.5. authentication flow for my application is : initial request is redirected to okta, which sends authentication token. i set the token in cookies on root (/) path and send a redirect response, so that next time when request come, it will have valid authentication token and will be authenticated successfully. everything was working fine on tomcat 7. after upgrading to tomcat 8.5, this flow stopped working. reason is that now redirect request is blocking the cookies containing the authentication token. on debugging i found that now cookies are not set on the root(/) path but they are now set on the logging request path (/app/api/). one more difference which i found while comparision between cookies in browser is that: in tomcat 7 cookies are showing with path (/) on browser developer tool while in case of tomcat 8, it is showing as ( / ). i guess that browser is not able to understand the path in the response. i have tried changing context under conf/context.xmt with attribute sessioncookiepath= / and changing cookie processor to legacycookieprocessor but nothing is working. any help will be appreciated
2024-02-12 11:53:40.367000000
in this code example, an object is created within the storage of another object using placement new, and freed memory is never accessed. here's the sequence of events: new object1 is created in allocated memory. object2 is a union, so is not touched. object2 is created inside object1 using placement new object1 is destroyed. object2 is untouched. contents of object2 are printed object2 destructor is called memory for object1 is deleted is this safe? #include stdio.h #include new struct object2 { char a{1}; char b{2}; char c{3}; }; struct object1 { // // disconnect object2 lifetime from object1 // union { object2 o; }; char d{3}; object1() { } }; int main(int argc, char argv[]) { // // create object1, internal object2 is not yet constructed // object1 object1 = new object1(); // // create object2 inside object1 using placement new // new (&object1- o) object2(); object2* object2 = &object1- o; // // destroy object1, but do not free memory, and do not touch object2 // object1- ~object1(); // // access alive object2 inside object2's memory // printf( a,b,c=(%d,%d,%d)\n , object2- a, object2- b, object2- c); // // destroy object2 // object2- ~object2(); // // destructor has already been called, so safe to delete delete memory without calling destructor // operator delete(object1); }
2024-02-28 22:57:08.757000000
i have created a winservice (.net framework 4.7.2) where i am using owinselfhost to expose some webapis (get apis protected via jwt) on localhost. from another application (in .net 6), i am using restclient to make calls to the webapis. after a certain period of time, about 10-20 seconds, i receive this error (it"s not deterministic, but it occurs with this frequency): system.applicationexception: exception of type 'system.applicationexception' was thrown. --- system.net.http.httprequestexception: an error occurred while sending the request. --- system.io.ioexception: unable to read data from the transport connection: an existing connection was forcibly closed by the remote host.. --- system.net.sockets.socketexception (10054): an existing connection was forcibly closed by the remote host. --- end of inner exception stack trace --- going to c:\windows\system32\logfiles\httperr, i verified that the specific error is this: local ip 65000 http/1.1 get /serv/api/controllername/endpoint - - - connectionabandonedby_reqqueue - on the startup of the winservice, i tried to configure the following parameters following this as a guideline: <a href="[LINK] self-hosted webapi timeout settings</a> cvar listener = (owinhttplistener)appbuilder.properties[ microsoft.owin.host.httplistener.owinhttplistener ]; listener.listener.timeoutmanager.minsendbytespersecond = uint.maxvalue; listener.listener.timeoutmanager.entitybody = capp.entitybodyminutes; listener.listener.timeoutmanager.drainentitybody = capp.drainentitybodyminutes; listener.listener.timeoutmanager.requestqueue = capp.requestqueueminutes; listener.listener.timeoutmanager.idleconnection = capp.idleconnectionminute; listener.listener.timeoutmanager.headerwait = capp.headerwaitminutes; however, the problem persists. do you have any other ideas?? thanks in advance
2024-02-13 18:06:23.390000000
you can launch a headless android emulator with this command, it works without needing to be logged into desktop on the remote or if you use wayland. emulator $nameofavd -no-window if you are logged into an x session, this way it renders on the gpu it's a little bit faster. display=:0 emulator $nameofavd -gpu host -no-window then forward the adb port onto the network with: adb forward tcp:1234 tcp:5555 adb tcpip 5555 ssh -nfg -l 1234:localhost:5555 localhost add the 1234 tcp port to your firewall. on your local machine, connect to the emulator's adb instance: adb connect lan ip :1234 then use scrcpy or android studio's built-in screen mirroring to access the device's screen.
2024-03-05 19:31:59.850000000
you don't even need jq to search for a zone name. simply use --query option to aws cli like so: aws route53 list-hosted-zones-by-name --query hostedzones[?name=='example.com.'].id --output text
2024-02-29 22:16:05.883000000
you can try in the following way. gte:2022-02-07t00:00:00.000z lte:2022-02-07t23:59:59:999z the reason for this is that: to expand a bit on the answer, the dates are stored as full date-time value. think of it as isodate( 2022-02-07t00:00:00.000z ) is storing february 7th 2022 at midnight. comparing isodate( 2022-02-07t00:00:00.000z ) and isodate( 2022-02-07t01:00:00.000z ) will not show them as equal since the full "datetime" is being compared. this answer has been quoted from: query date range of the same day thank you wedothebest4you
2024-03-27 11:19:42.363000000
weird. i know the solution, but it is rather a fix. the true reason stays hidden for me. the outer layer of the issue is that the subject property is null when submitting the form. it should have the values from the form. the form is submitted correctly. i will show only the relevant part: model you are binding to form: public class subjectname { public string? subname { get; set; } public virtual icollection evaluationtweak ? evaluationtweaks { get; set; } } problem here are the evaluationtweaks , if you remove them, everything work as expected. now the reason why. check the evaluationtweak class: public class evaluationtweak { [required] public virtual subjectname subject { get; set; } = new(); } when you remove the subject property, it will also work... note that you can replace it with int subjectid so you keep the reference there. also note that removing subject property from evaluationtweak will not change the db schema. but why is that a problem? i have no idea, my guess is - it has somehow problem with serialization? it is perceived as circular reference maybe? would be nice to investigate further.
2024-02-24 18:34:28.623000000
i have previously used regular stack navigator from [USER]-navigation/stack , i've decided to migrate to native stack from [USER]-navigation/native-stack , but cardstyleinterpolator doesn't exist as an option in native stacks, which i've used for animating background colour opacity with screen transition progress: const forvertical = ({ current, inverted, layouts: { screen }, }: stackcardinterpolationprops) = { const translatey = multiply( current.progress.interpolate({ inputrange: [0, 1], outputrange: [screen.height, 0], extrapolate: 'clamp', }), inverted, ); const overlayopacity = current.progress.interpolate({ inputrange: [0, 1, 2], outputrange: [0, 0.85, 0.85], }); return { cardstyle: { transform: [ { translatey, }, ], }, overlaystyle: { opacity: overlayopacity, }, }; }; which i've used as cardstyleinterpolator i've found a workaround for the slide from bottom animation using presentation: 'transparentmodal', animation: 'slidefrombottom', which results in a correct behaviour for modal presenting what's left is just animating the background opacity, that's what i'm struggling with. is this possible somehow using native stack only without using cardstyleinterpolator ? here's snack with example of what i've had and what i'm trying to achieve: [LINK]>
2024-02-20 15:01:25.910000000
i have installed vagrant in my system and wanted to install ubuntu/jammy64 through vagrant but got the following error while installing bringing machine 'default' up with 'virtualbox' provider... == default: box 'ubuntu/jammy64' could not be found. attempting to find and install... default: box provider: virtualbox default: box version: = 0 ` how can i solve this issue and start vm command used 1. vagrant init ubuntu/jammy64 { init was successful } 2. vagrant up
2024-03-09 09:55:19.820000000
i'm new to android programming and json. i'm trying to call a authentication api with volley post request. i'm trying get the the username and password inserted from the user and sending to the api and returning the response. it seems i can't directly pass the input into json body of the request. can you point a way to input the data getting from the user input to json body request. i'm attaching my code, so you can get a better idea. thanks. public class loginactivity extends appcompatactivity { button loginbtn; edittext inputusername, inputpassword; string url; requestqueue queue; stringrequest stringrequest; [USER] protected void oncreate(bundle savedinstancestate) { super.oncreate(savedinstancestate); setcontentview(r.layout.activitylogin); loginbtn = findviewbyid(r.id.btnlogin); inputusername=findviewbyid(r.id.edittextusername); inputpassword=findviewbyid(r.id.edittextpassword); url= [LINK] ; try { queue=volley.newrequestqueue(this); jsonobject jsonbody = new jsonobject(); string username = inputusername.gettext().tostring(); string password = inputpassword.gettext().tostring(); jsonbody.put( username , username); jsonbody.put( password , password); final string mrequestbody = jsonbody.tostring(); stringrequest=new stringrequest(request.method.post, url, new response.listener string () { [USER] public void onresponse(string response) { toast.maketext(loginactivity.this, response,toast.lengthlong).show(); } }, new response.errorlistener() { [USER] public void onerrorresponse(volleyerror error) { toast.maketext(loginactivity.this, error.tostring(),toast.lengthlong).show(); } }){ [USER] public map string, string getheaders() throws authfailureerror { hashmap header= new hashmap(); header.put ( accept , / ); header.put ( content-type , application/json ); return header; } [USER] public string getbodycontenttype() { return application/json; charset=utf-8 ; } [USER] public byte[] getbody() throws authfailureerror { try { return mrequestbody == null ? 
null : mrequestbody.getbytes( utf-8 ); } catch (unsupportedencodingexception uee) { volleylog.wtf( unsupported encoding while trying to get the bytes of %s using %s , mrequestbody, utf-8 ); return null; } } //return response code }; login_btn.setonclicklistener(new view.onclicklistener() { [USER] public void onclick(view view) { queue.add(stringrequest); } }); } catch (jsonexception e) { e.printstacktrace(); } } }
2024-03-14 15:15:27.790000000
create two columns and create a heading as the first row is excluded. one for withdrawals and one for deposits for bank transactions for example. use the formulas before for the deposit column and for the withdrawn column for all rows. be sure the formatting is set to currency instead of accounting to remove the parenthesis for negative currency entries. withdrawn column: =if(b2 1,b2, ) deposit column: =if(b2 1, ,b2)
2024-02-22 04:28:01.057000000
i am novice on axis framework and handling a legacy support product. while i upgraded one of my service from axis 1.62 to axis 1.79. service started giving error only on higher prod. it is works fine on dev and qa environment. i am able to browse to wsdl in all cases. while on prod it is giving below error when i hit service using postman: the endpoint reference (epr) for the operation not found is [LINK]> and the wsa action = null. if this epr was previously reachable, please contact the server administrator. and on prod environment when i provide soapaction header on postman it works as expected. so i am not able to understand why same code with same version of axis works on lower environment while without soapaction header and gives error on prod? can we disable soapaction validation, it will break huge number of clients. i tried adding soapaction header using servletfilter but with no luck. any help would be greatly appreciated.
2024-03-14 22:35:53.950000000
i found a solution for this problem that works for api's from 28 to 34. the solution was to remove flagnotfocusable flag from here val layoutflag: int = windowmanager.layoutparams.typeapplicationoverlay val params = windowmanager.layoutparams( windowmanager.layoutparams.matchparent, windowmanager.layoutparams.matchparent, layoutflag, windowmanager.layoutparams.flaglayoutinscreen , pixelformat.translucent ) and then we setup a listener with our view like this: if (build.version.sdkint = build.versioncodes.p) { composeview.addonunhandledkeyeventlistener{ view, event - if ( event.action == keyevent.keycodeback || event.action == keyevent.keycodesoftleft || event.action == keyevent.keycodesoftright || event.action == keyevent.keycodehome || event.action == keyevent.keycodemove_home) { previouscallcard.finishedshowingcard(this) windowmanager.removeview(composeview) this[USER].stopself() true } else { false } } } since apis above 28 have the soft left or right as a way to get back instead of the back button we handle all those keycodes to handle the back effect. i still don't know if that an accurate and well implemented or not so if anyone has any enhancement i would be happy to see your contribution.
2024-03-22 11:45:26.317000000
assume i have the following bar chart made with library(plotly) (the space on the right side is intentional): library(dplyr) library(plotly) library(tidyr) d - tibble(cat = letters[1:3], val = c(25, 10, 30), total = 40) (bars - d % % mutate(remaining = total - val) % % pivotlonger(cols = c(val, remaining)) % % plotly(x = ~ value, y = ~ cat, type = bar , orientation = 'h', color = ~ name, colors = c( #[HASH] , #[HASH] )) % % layout(xaxis = list(title = na, range= c(0, 60)), yaxis = list(title = na), showlegend = false, barmode = stack )) i now would like to inset the following pie charts at x == 50 and at the corresponding y-position: pies - d % % rowwise() % % groupmap(~ plotly(.x) % % add_pie(values = ~ c(val, total - val), marker = list(colors = c( #[HASH] , #[HASH] )))) the expected outcome looks like this (done by manually pasting the pies into the bar chart): ideally the xa-axis would just span until 40 and there is no visible axis below the pies. p.s: i figured in this reprex that the colors are also messed up, how would i adjust the colors in the pie chart such that they match the colors in the bar chart?
2024-03-15 14:38:19.257000000
let's say currently i make 100 publish calls on a channel with each payload being a singleton list of some object. does merging these into a single publish call with payload being the list of 100 objects improve performance? (especially the load on redis' cpu)
2024-03-08 08:36:16.117000000
yes, as you've linked, it's possible to send the card message itself via graph, but you're limited in terms of how to deal with the response, especially in a web app scenario. if you're ok with them clicking a button that loads your web page in a browser, then you can simply use a open url type of card action button (see here for more: [LINK]>). if you want the button to perform an action within teams though, then you need to use a bot behind the scenes. you can consider the flow bot, as part of a power automate workflow, or you can build your own bot to receive the message and take the relevant action. both of these are broader topics than i can dive into here but you can see for instance [LINK]> and [LINK]>
2024-03-17 09:15:39.390000000
you can make a copy of the entries in your dataframe which end with ? , . or ! ; strip the punctuation from the end and then join that back to the original dataframe, sorting the index to keep the two versions of strings with punctuation together: punc = df[df['msg'].str.endswith(('?', '.', '!'))].copy() punc['msg'] = punc['msg'].str.rstrip('?.!') out = pd.concat([df, punc]).sortindex(kind='stable',ignoreindex=true) output: msg | label 0 hello | 1 1 hi! | 0 2 hi | 0
2024-02-19 11:12:18.050000000
mathematically, i'm trying to calculate x ^t a x , where x is an n-dimensional coordinate and a a n-dimensional square matrix. however, i'd like to efficiently calculate this for a set of coordinates. for example, in two dimensions: import numpy as np x = np.column_stack([[1,2,3,4,5],[6,7,8,9,0]]) a = np.array([[1,0],[0,2]]) print(x[0] @ a @ x[0]) works how can i get efficiently an array of x[i] @ a @ x[i]? y = [x[i] @ a @ x[i] for i in range(x.shape[0])]
2024-03-20 17:46:17.673000000
does a transaction occur in this case? [USER] [USER]( /api/v1/student/z ) [USER] public class someservicecontroller { [USER] public othersomeservice othservice; [USER] public responseentity ? someservicecontrollers() { othservice.z(); return responseentity.ok( ok ); } } public class othersomeservice { [USER] private someservice s; public somedata z() { s.a(); } } public class someservice { public somedata a() { b(); } [USER] public somedata b() { //some codes } } chatgpt4 says it was created, but when i try, there is no transaction manager in the logs. can you help me? log is as follows o.s.security.web.filterchainproxy : secured get /api/v1/student/z o.s.web.servlet.dispatcherservlet : get /api/v1/student/z , parameters={} s.w.s.m.m.a.requestmappinghandlermapping : mapped to com.management.studentmanagement.controller.someservicecontroller#someservicecontrollers() o.j.s.openentitymanagerinviewinterceptor : opening jpa entitymanager in openentitymanagerinviewinterceptor o.s.w.s.m.m.a.httpentitymethodprocessor : using 'text/plain', given [/] and supported [text/plain, /, application/json, application/*+json] o.s.w.s.m.m.a.httpentitymethodprocessor : writing [ ok ] o.j.s.openentitymanagerinviewinterceptor : closing jpa entitymanager in openentitymanagerinviewinterceptor o.s.web.servlet.dispatcherservlet : completed 200 ok
2024-03-31 07:53:21.247000000
i am trying to create azure function app under azure sentinel using gcp(google cloud platform) data connector, under arm template edit script i tried with sku mentionining and its paramaters i passed, but the requirement does not satisfied. inside sentinel create azure function with premium or standard plan, by defualt it is creating y1 sku(which is consumption based plan). arm template script for modifications: functionappplansku : { type : string , defaultvalue : ep1 , allowedvalues : [ ep1 , ep2 , ep3 ], metadata : { description : specifies the azure function hosting plan sku. } after changing the script under arm template and deployment done, deployment getting successful but when i check the plan of the function app it is not changed as expected. it is y1 - consumption only.
2024-02-10 15:00:54.823000000
the windows-2022 runner does not include the targeting pack for 4.6.1 (since it is out of support). if using a windows-2019 runner is unacceptable you can download and install the targeting pack into the runner. at the time of writing this works: - name: download 461 targeting pack uses: suisei-cn/actions-download-file@[HASH] 1.6.0 id: downloadfile remember to give an id if you need the output filename with: url: [LINK] target: public/ - name: install targeting pack shell: cmd working-directory: public run: ndp461-devpack-kb3105179-enu.exe /q
2024-02-27 22:38:12.353000000
i'm attempting to make my api match this response: { data : [ { id : 1, name : emma smith , avatar : avatars/300-6.jpg , email : smith[USER].com , position : art director , role : administrator , lastlogin : yesterday , twosteps : false, joinedday : 10 nov 2022, 9:23 pm , online : false }, { id : 2, name : melody macy , initials : { label : m , state : danger }, email : melody[USER].com , position : marketing analytic , role : analyst , lastlogin : 20 mins ago , twosteps : true, joinedday : 10 nov 2022, 8:43 pm , online : false }, { id : 3, name : max smith , avatar : avatars/300-1.jpg , email : max[USER].com , position : software enginer , role : developer , lastlogin : 3 days ago , twosteps : false, joinedday : 22 sep 2022, 8:43 pm , online : false }, { id : 4, name : sean bean , avatar : avatars/300-5.jpg , email : sean[USER].com , position : web developer , role : support , lastlogin : 5 hours ago , twosteps : true, joinedday : 21 feb 2022, 6:43 am , online : false }, { id : 5, name : brian cox , avatar : avatars/300-25.jpg , email : brian[USER].com , position : ui/ux designer , role : developer , lastlogin : 2 days ago , twosteps : true, joinedday : 10 mar 2022, 9:23 pm , online : false }, { id : 6, name : mikaela collins , initials : { label : m , state : warning }, email : mik[USER].com , position : head of marketing , role : administrator , lastlogin : 5 days ago , twosteps : false, joinedday : 20 dec 2022, 10:10 pm , online : false }, { id : 7, name : francis mitcham , avatar : avatars/300-9.jpg , email : f.mit[USER].com , position : software arcitect , role : trial , lastlogin : 3 weeks ago , twosteps : false, joinedday : 10 nov 2022, 6:43 am , online : false }, { id : 8, name : olivia wild , initials : { label : o , state : danger }, email : olivia[USER].com , position : system admin , role : administrator , lastlogin : yesterday , twosteps : false, joinedday : 19 aug 2022, 11:05 am , online : false }, { id : 9, name : neil owen , initials : { label : n 
, state : primary }, email : owen.neil[USER].com , position : account manager , role : analyst , lastlogin : 20 mins ago , twosteps : true, joinedday : 25 oct 2022, 10:30 am , online : false }, { id : 10, name : dan wilson , avatar : avatars/300-23.jpg , email : dam[USER].com , position : web desinger , role : developer , lastlogin : 3 days ago , twosteps : false, joinedday : 19 aug 2022, 10:10 pm , online : false } ], payload : { pagination : { page : 1, firstpageurl : /?page=1 , from : 1, lastpage : 3, links : [ { url : null, label : &laquo; previous , active : false, page : null }, { url : /?page=1 , label : 1 , active : true, page : 1 }, { url : /?page=2 , label : 2 , active : false, page : 2 }, { url : /?page=3 , label : 3 , active : false, page : 3 }, { url : /?page=2 , label : next &raquo; , active : false, page : 2 } ], nextpageurl : /?page=2 , itemsperpage : 10, prevpage_url : null, to : 10, total : 21 } } } source - [LINK]> currently i'm using the standard laravel pagination: return users::paginate(); however it does not break the json response down to what the frontend application i'm using is looking for which is: payload : { pagination : {
2024-02-25 04:52:03.487000000
i found opening the program i was running the script from as an 'administrator' let me create the log in event viewer. also: if not sourceexist then dim ev as new eventlogpermission(eventlogpermissionaccess.administer, . ) ev.permitonly() eventlog.createeventsource(cs, testlog ) end if will mean you have to set up id messages, or get this at the top of your message. (the description for event id 51001 from source test cannot be found. either the component that raises this event is not installed on your local computer or the installation is corrupted. you can install or repair the component on the local computer.) to be simple, do this instead: if not sourceexist then eventlog.createeventsource(sourcename, logname) end if this will stop the message above from occuring - instead only showing the message you sent to the eventlog.
2024-03-13 22:08:59.967000000
there are multiple ways to get your expected output in pyspark as follows: using equalnull : df.filter(~equalnull(col( first ), col( last ))) using nvl : df.filter(nvl(col( first ), lit( )) != nvl(col( last ), lit( ))) using ifnull : df.filter(ifnull(col( first ), lit( )) != ifnull(col( last ), lit( ))) you can do this in many other ways like <a href="[LINK]> did in their answer, using expr or just sparksql. for me, the best approach is using equal_null because it matches your requirement exactly and doesn't require any additional literals (empty strings) like the other approaches.
2024-03-12 20:53:31.807000000
if you can maintain it globally, checkboxtheme: checkboxthemedata( side: materialstateborderside.resolvewith( (set materialstate states) { if (states.contains(materialstate.selected)) { return const borderside(color: colors.red, width: 1.0); } return const borderside(color: colors.black, width: 1.0); }, ), fillcolor: materialstateproperty.all color (colors.white), checkcolor: materialstateproperty.all color (colors.blue), ),
2024-02-28 09:25:17.640000000
if you want the contours plot of the copula with margins n(0,1) and n(0,1), one option is to manually define its density: library(copula) cop - normalcopula(0.7) density of the bivariate distribution with copula 'cop' and margins n(0,1), n(0,1) f - function(x, y) { dcopula(c(pnorm(x), pnorm(y)), cop) dnorm(x) dnorm(y) } then you can use the usual contour function. but it's easier to create this distribution with mvdc : library(copula) mv - mvdc( normalcopula(0.7), margins = c( norm , norm ), parammargins = list(list(mean = 0, sd = 1), list(mean = 0, sd = 1)) ) then you can directly call contour on this multivariate distribution: contour(mv, dmvdc, xlim = c(-3, 3), ylim = c(-3, 3), asp = 1)
2024-03-20 09:01:18.010000000
you can use dictionary unpacking . here's a simple example: def f(a = 1, b = 2): print(f {a = }, {b = } ) f(b=5) output: a = 1, b = 5 kwargs = { b : 5} f(**kwargs) output: a = 1, b = 5 which can be applied to your scenario: def editfeaturesretry(edittype, data): agollayer.edit_features(**{edittype: data}) editfeaturesretry( adds , datalayer)
2024-03-05 05:06:53.847000000
this is for api 21 ! after messing around with this for several days, i gave up on xml (and the documentation) and did the java thing. get the action bar. loop through its children. when the title view is found, do things to it! final view actionbarview = this.findviewbyid(r.id.actionbar); final viewgroup actionbarviewgroup = (viewgroup) actionbarview; for (int i = 0; i actionbarviewgroup.getchildcount(); i++) { final view actionbarchildview = actionbarviewgroup.getchildat(i); if (actionbarchildview instanceof appcompattextview) { final appcompattextview titletext = (appcompattextview) actionbarchildview; // 50 pixels will be 25dp assuming your screen density is 2:1 titletext.setpadding(0, 50, 0, 0); titletext.settextsize(typedvalue.complexunit_sp, 30); titletext.settextcolor(0xff00ff00); break; } } more info can be found <a href="[LINK]>
2024-03-01 08:30:39.583000000
i have added a html code inside the product title so - on the other products gallery page its showing finely but when we open the product it shows title in html format enter image description here do you have any idea gow to resolve this
2024-03-28 14:12:11.897000000
make sure that its executable is included in the system's path environment variable. sometimes, even if git is installed, visual studio code may not recognize it if it's not in the path. if it's not: add the directory path where git is installed (e.g., c:\program files\git\bin) to the list of paths environment variables . note: make sure to separate each path with a semicolon if there are multiple paths listed.
2024-03-14 12:03:38.153000000
i'd generate the dotenv artifact in your nodejs job, and then added another deploy job that depends on the nodejs job, including the artifacts -- this way you can pass results of one job to another. job1: script: node my_script.js envars.sh artifacts: reports: dotenv: envars.sh job2: script: deploy.sh needs: - job: job1 optional: false artifacts: true
2024-02-14 13:55:15.153000000
if someone is looking for a cross-platform way to redirect the output to the null file , then we can use os.devnull attribute to specify that we want to output to be redirected to the null file . import sys import os stderr = sys.stderr stdout = sys.stdout print(os.devnull) print( hell o world! ) sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') print( this is redirected to null ) output: hell o world! first, we are creating a backup for the default standard output and standard error : stderr = sys.stderr stdout = sys.stdout then we display msg to the default standard output : print( hell o world! ) next, we are redirecting the output messages and the error messages to the null file : sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') here we are verifying that the output is redirected to the null file : print( this is redirected to null ) if in case, you want to find where the null file is located it may return different results because this depends on the operating system on which the python program is running: print(os.devnull) it is also possible that you want to reset where output and error messages are displayed to the default standard output and standard error in that case you can use this approach: import sys import os stderr = sys.stderr stdout = sys.stdout sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') print( this is redirected to null. ) sys.stderr = stderr sys.stdout = stdout print( this is redirected to the standard output. ) output: this is redirected to the standard output.
2024-03-12 03:27:49.210000000
from your code, i see that by brackets you mean parentheses . this option is configurable using arrow-function-parentheses . documentation in your case, you want: arrowparens: avoid
2024-02-25 08:42:06.233000000
i have two text files, one with patient ids (subjects.txt) and one with some measures(numberofslices.txt). both text files contain 1 column only. in a third file (.xmlg), i have put specific words (e.g.,numberofbrainslices) that i want to be automatically substituted for the 1st patients id with the 1st measure, then the 2nd patients id and the second measure. however, with the current loop, i am producing these .xmlg files with a different patientid but with the same measure (the number of slices) as i don't know how to write the nested loop and make it increase by 1 position every time, so patient and measure go in parallel - second patient - second measure, third patient - third measure etc. does anyone know how to do that in bash? #!bin/bash for sub in cat subjects.txt do for f in cat numberofslices.txt do sed -i s/numberofbrainslices/${f}/g \ /data/mrimeasures/analysis/set_files/${sub}.xmlg; done done
2024-02-11 05:10:56.847000000
the error insufficient privileges to complete the operation usually occurs if the microsoft entra id application doesn't have required permissions to perform the action. initially i got the same error : to resolve the error, make sure to grant user.read.all application type api permission as you are making use of client credential flow. user.read.all delegated type api permission must be granted when you make use of user interactive flow. grant the user.read.all application type api permission like below: i am able to successfully retrieve the users with upn like below: using microsoft.graph; using azure.identity; using microsoft.graph.models.odataerrors; class program { static async task main(string[] args) { var scopes = new[] { [LINK] }; var clientid = clientid ; var tenantid = tenantid ; var clientsecret = clientsecret ; var options = new clientsecretcredentialoptions { authorityhost = azureauthorityhosts.azurepubliccloud, }; var clientsecretcredential = new clientsecretcredential( tenantid, clientid, clientsecret, options); var graphclient = new graphserviceclient(clientsecretcredential, scopes); try { var result = await graphclient.users[ user[USER].onmicrosoft.com ].getasync(); console.writeline($ user details: {result.displayname}, {result.mail}, {result.id} ); } catch (odataerror odataerror) { console.writeline(odataerror.error?.code); console.writeline(odataerror.error?.message); throw; } } } reference: get a user - microsoft graph v1.0 | microsoft
2024-03-25 14:25:15.193000000
enumset is a collection and should be treated like one. [USER] [USER](name = rights , joincolumns = [USER](name = myfile_id )) [USER](enumtype.string) private set right rights; important: program to the interface set and use the implementation enumset in setter and constructor. private set right rights; public void setrights(enumset right rights) { this.rights = rights; }
2024-03-10 01:42:59.977000000
i have a microsoft outlook msg file which has an embedded msg file. i can get the attachment, save it as a file, but i cannot access it as a msg object. i am using c, i cannot use c++ or c# or any other language. to get the size of the attachment, i read the prattachsize property of the attachment, but the size i get is smaller than the size of the file when i save the attachment. i tried to use prattachdataobj , prattachdatabin and mapiobject in order to get the attached embedded msg, but all these functions fail. how do i get the attached msg as a proper msg?
2024-03-12 08:42:10.840000000
i'm trying to build docker image from jenkins pipeline and it is building successfully, but when i try to inspect working directory of newly built image, it is showing as root(/) eventhough in last step of docker build pwd is /home/node/app/test. any suggetions? my dockerfile: from opensuse-base:15.5-0086 env testuser=test env testgroup=test env testuid=1999 env testgid=1999 env testhid=1002 env vaultversion=1.15.0 create app directory workdir /home/node/app/test env export term=xterm env export debianfrontend=noninteractive bundle app source copy . . opensuse zypper package manager run zypper --non-interactive update run zypper --non-interactive install vim npm install openjdk run zypper --non-interactive install java-17-openjdk run export javahome=$(java -xshowsettings:properties -version 2 &1 /dev/null | grep 'java.home' | awk '{print $3}') && \ echo export javahome=$javahome /etc/profile && \ echo export path=\$path:\$javahome/bin /etc/profile env javahome=${javahome} ##testhid is the docker group id of the host machine run groupadd -f ${testgroup} -g ${testgid} && \ useradd ${testuser} --comment test user --uid ${testuid} --gid ${testgid} --create-home --home-dir /home/${testuser} --shell /bin/bash && \ groupadd -g ${testhid} docker && \ usermod -ag docker ${testuser} ; run chown -r ${testuser}:${testgroup} testdist expose 3000 entrypoint [ /bin/bash , ./entrypoint.sh ] user ${test_user} run pwd cmd node server jenkins logs: #13 [9/11] run groupadd -f test -g 1999 && useradd test --comment test user --uid 1999 --gid 1999 --create-home --home-dir /home/test --shell /bin/bash && groupadd -g 1002 docker && usermod -ag docker test ; #13 0.891 group 'mail' not found. creating the user mailbox file with 0600 mode. 
#13 done 1.0s #14 [10/11] run chown -r test:test mfsigndist #14 done 1.1s #15 [11/11] run pwd #15 0.822 /home/node/app/test #15 done 0.9s #16 exporting to image #16 exporting layers #16 exporting layers 6.9s done #16 writing image sha256:[HASH] done #16 naming to docker.io/library/test-sign-pr:1666 0.0s done #16 done 6.9s + docker run --rm test-sign-pr:1666 pwd / [pipeline] } [pipeline] // container screenshot is here note: altered some key words in the logs. works as expected when you build on local.
2024-03-19 09:46:35.230000000
i developed a flutter app that i want to release commercially for ios. i want to set up a one time purchase but offer the user a free trial before he has to pay. is there a way to release an app in this fashion without having to implement some logic in the app itself. all the information i find online makes me unsure on how to approach this. as far as i know a fixed price is handled entirely by apple and subscriptions have a trial option. is there a way to combine the fixed price with a trial though?
2024-03-14 17:32:48.243000000
there is no built-in mechanism for debugging add-ins on android. you can treat office web add-ins like regular web applications and use any remote javascript debugging techniques available for android. for example, take a look at remotejs , trackjs or js console . also see node.js debugging in vs code if you are building your add-in on top of nodejs.
2024-02-24 00:25:07.520000000
my code is supposed to read math problems from a csv file and then check if they are answered correctly, but whenever i use the getline() it just doesn't get the first two characters from each line. (also ignore the comments those are from the base file i had to use) my code: #include iostream #include fstream #include cstdlib #include climits #include vector #include string using namespace std; int main(){ fstream fin; int comma; float cans, sans, question, correct; fin.open( problems.csv ); if (fin.fail()) { cerr file cannot be opened for reading. endl; exit(1); // exit if failed to open the file } vector string row; // new string variable string line, word, temp, problem; correct = 0; question = 0; while(fin temp) { question ++; row.clear(); // this loop reads the file line-by-line // extracting 5 values on each iteration getline(fin, line); comma = line.find( , ); cans = stod(line.substr(comma + 1)); cout ( question ) line.substr(0,comma) ? ; cin sans; if (sans == cans){ cout true endl; correct ++; } else { cout false endl; } fin.ignore(int_max, '\n'); //skips to the end of line, //ignorring the remaining columns // for example, to print the date and east basin storage: //cout date eastst endl; } } csv file: code output: i tried using temp instead of line , but then my conversion to int s didn't work.
2024-02-14 17:56:05.203000000
when i send this request for authorization: [LINK]> i receive the error : some requested scopes cannot be shown: [[LINK]] if you are a developer of metricinsights.com, see the error details (error details contain only the request i sent). error 400: invalidscope but when i remove prompt=consent part i successfully get an authcode and exchange on token but i don't receive a refresh token. what is wrong? i expect to receive refresh token on every auth error example
2024-02-16 14:19:57.487000000
the dispatching method simply does not find your s3 implementation inside your r6 class. it does find it, however, if you define it in the global environment: tmp - r6::r6class( tmp , list( a = list(x = 1), b = data.frame(x = 1), divide = function(x) usemethod( divide ), testlist = function() self$divide(self$a), testdf = function() self$divide(self$b) ) ) divide.list - function(x) x$x/4 divide.data.frame - function(x) x$x/2 tmp2 - tmp$new() tmp2$testdf() [1] 0.5 tmp2$testlist() [1] 0.25
2024-02-26 12:22:36.607000000