Imported Upstream version 5.2.0.175

Former-commit-id: bb0468d0f257ff100aa895eb5fe583fb5dfbf900
This commit is contained in:
Xamarin Public Jenkins (auto-signing)
2017-06-07 13:16:24 +00:00
parent 4bdbaf4a88
commit 966bba02bb
8776 changed files with 346420 additions and 149650 deletions

View File

@@ -14,6 +14,124 @@ indent_size = 4
[project.json]
indent_size = 2
# C# files
[*.cs]
# New line preferences
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_within_query_expression_clauses = true
# Indentation preferences
csharp_indent_block_contents = true
csharp_indent_braces = false
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = flush_left
# avoid this. unless absolutely necessary
dotnet_style_qualification_for_field = false:suggestion
dotnet_style_qualification_for_property = false:suggestion
dotnet_style_qualification_for_method = false:suggestion
dotnet_style_qualification_for_event = false:suggestion
# only use var when it's obvious what the variable type is
csharp_style_var_for_built_in_types = false:none
csharp_style_var_when_type_is_apparent = false:none
csharp_style_var_elsewhere = false:suggestion
# use language keywords instead of BCL types
dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
dotnet_style_predefined_type_for_member_access = true:suggestion
# name all constant fields using PascalCase
dotnet_naming_rule.constant_fields_should_be_pascal_case.severity = suggestion
dotnet_naming_rule.constant_fields_should_be_pascal_case.symbols = constant_fields
dotnet_naming_rule.constant_fields_should_be_pascal_case.style = pascal_case_style
dotnet_naming_symbols.constant_fields.applicable_kinds = field
dotnet_naming_symbols.constant_fields.required_modifiers = const
dotnet_naming_style.pascal_case_style.capitalization = pascal_case
# static fields should have s_ prefix
dotnet_naming_rule.static_fields_should_have_prefix.severity = suggestion
dotnet_naming_rule.static_fields_should_have_prefix.symbols = static_fields
dotnet_naming_rule.static_fields_should_have_prefix.style = static_prefix_style
dotnet_naming_symbols.static_fields.applicable_kinds = field
dotnet_naming_symbols.static_fields.required_modifiers = static
dotnet_naming_style.static_prefix_style.required_prefix = s_
dotnet_naming_style.static_prefix_style.capitalization = camel_case
# internal and private fields should be _camelCase
dotnet_naming_rule.camel_case_for_private_internal_fields.severity = suggestion
dotnet_naming_rule.camel_case_for_private_internal_fields.symbols = private_internal_fields
dotnet_naming_rule.camel_case_for_private_internal_fields.style = camel_case_underscore_style
dotnet_naming_symbols.private_internal_fields.applicable_kinds = field
dotnet_naming_symbols.private_internal_fields.applicable_accessibilities = private, internal
dotnet_naming_style.camel_case_underscore_style.required_prefix = _
dotnet_naming_style.camel_case_underscore_style.capitalization = camel_case
# Code style defaults
dotnet_sort_system_directives_first = true
csharp_preserve_single_line_blocks = true
csharp_preserve_single_line_statements = false
# Expression-level preferences
dotnet_style_object_initializer = true:suggestion
dotnet_style_collection_initializer = true:suggestion
dotnet_style_explicit_tuple_names = true:suggestion
dotnet_style_coalesce_expression = true:suggestion
dotnet_style_null_propagation = true:suggestion
# Expression-bodied members
csharp_style_expression_bodied_methods = false:none
csharp_style_expression_bodied_constructors = false:none
csharp_style_expression_bodied_operators = false:none
csharp_style_expression_bodied_properties = true:none
csharp_style_expression_bodied_indexers = true:none
csharp_style_expression_bodied_accessors = true:none
# Pattern matching
csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion
csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
csharp_style_inlined_variable_declaration = true:suggestion
# Null checking preferences
csharp_style_throw_expression = true:suggestion
csharp_style_conditional_delegate_call = true:suggestion
# Space preferences
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = do_not_ignore
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false
# C++ Files
[*.{cpp,h,in}]
curly_bracket_next_line = true

View File

@@ -28,6 +28,7 @@ msbuild.wrn
# Cross building rootfs
cross/rootfs/
cross/android-rootfs/
# add x86 as it is ignored in 'Build results'
!cross/x86

View File

@@ -1 +1 @@
1.0.27-prerelease-01224-01
1.0.27-prerelease-01420-03

View File

@@ -46,12 +46,14 @@ Coding Guidelines
- [Breaking Change Rules](coding-guidelines/breaking-change-rules.md)
- [Project Guidelines](coding-guidelines/project-guidelines.md)
- [Adding APIs Guidelines](coding-guidelines/adding-api-guidelines.md)
- [Legal Native calls](building/pinvoke-checker.md)
Building from Source
====================
- [Building CoreFX on FreeBSD, Linux and OS X](building/unix-instructions.md)
- [Code Coverage](building/code-coverage.md)
- [Cross Building](building/cross-building.md)
- [Package and Assembly File Versioning](building/versioning.md)
Other Information

View File

@@ -1,4 +1,4 @@
#.NET Core Applications
# .NET Core Applications
NETCoreApp is the [target framework](https://docs.nuget.org/Create/TargetFrameworks) that represents .NET Core applications
@@ -11,7 +11,7 @@ Friendly name | .NET Core Application
NuGet folder name | `netcoreapp1.0`
NETStandard version supported | `netstandard1.6`
##FAQ
## FAQ
**Q: What is a .NET Core application?**
**A:** A .NET Core application is an application that can run on any .NET Core runtime: CoreCLR (current), .NETNative (future). It can run on one of many .NET core platforms (Windows, OSX, Linux). It relies on the host provided by the given runtime. It's a composable framework built from the packages on which the application depends. Its assembly loading policy permits newer versions of dependencies without any application configuration (eg: BindingRedirects are not required).

View File

@@ -1,52 +0,0 @@
# Building Tests Against Packages
## Usage Scenarios
### Build the product locally and then test what you've built
In this scenario, you produce a product build, including packages, then generate test project.json's which reference the locally built package versions and finally build tests compiling against the local packages.
1. Build the product and packages
- ```Build.cmd -BuildTests=false```
2. Generate Test project.json files
- ```sync.cmd -t -- /p:"SkipCleanPackages=true" /p:"PackagesDrops=[ProjectDir]bin/packages/[Release|Debug]/"```
- /p:SkipCleanPackages=true is required for release branches where the packages folder is cleaned during every build.
3. Build Tests against packages
- ```build-tests.cmd -BuildTestsAgainstPackages -- /p:"PackagesDrops=[ProjectDir]bin/packages/[Release|Debug]/"```
- -BuildTestsAgainstPackages tells the build to use the project.json files you generated in the "Generate Test project.json files" step
- /p:"PackagesDrops=[ProjectDir]bin/packages/[Release|Debug]/" tells the build to use the packages from your local build drop.
### Download product from an Azure blob
This scenario skips the product build step, and instead downloads packages from Azure blob storage
1. Sync product from Azure
- ```sync.cmd -ab -AzureAccount=dotnetbuildoutput -AzureToken=******** -Container=[Azure container name] -- /p:"DownloadDirectory=[ProjectDir]Packages\AzureTransfer" /p:"SkipCleanPackages=true"```
2. Generate Test project.json files
- ```sync.cmd -t -- /p:"SkipCleanPackages=true" /p:"PackagesDrops=[ProjectDir]Packages/AzureTransfer/[Release|Debug]/"```
- /p:SkipCleanPackages=true is required for release branches where the packages folder is cleaned during every build.
3. Build Tests against packages
- ```build-tests.cmd -BuildTestsAgainstPackages -- /p:"PackagesDrops=[ProjectDir]Packages/AzureTransfer/[Release|Debug]/"```
- -BuildTestsAgainstPackages tells the build to use the project.json files you generated in the "Generate Test project.json files" step
- /p:"PackagesDrops=[ProjectDir]Packages/AzureTransfer/[Release|Debug]/" tells the build to use the packages from the Azure download (DownloadDirectory).
### Use a versions file for specifying package versions
This scenario uses a versions file (https://github.com/dotnet/versions/blob/master/build-info/dotnet/corefx/master/Latest_Packages.txt, for example) to determine what package versions to build tests against.
1. Generate Test project.json files using a 'versions' file.
- ```sync.cmd -t -- /p:"SkipCleanPackages=true" /p:"VersionsFiles=[local version file path]"```
- /p:SkipCleanPackages=true is required for release branches where the packages folder is cleaned during every build.
2. Build Tests against packages
- ```build-tests.cmd -BuildTestsAgainstPackages -- /p:"PackagesDrops=[ProjectDir]bin/packages/[Release|Debug]/"```
- -BuildTestsAgainstPackages tells the build to use the project.json files you generated in the "Generate Test project.json files" step
- /p:"PackagesDrops=[ProjectDir]bin/packages/[Release|Debug]/" tells the build to use the packages from your local build drop.
- If the package versions you are referencing have been published publically, you can omit the "PackagesDrops" property.
## Common Questions
- **How do I know it worked?** The best way is to look in the log for the compilation line ("csc.exe") and ensure that its references now point to packages (packages\blah) where previously they pointed to build product binaries (bin\blah).
- **Why are there build failures?** Not all of our tests build nicely against package references due to differences in the public surface area (compiling against the reference assembly versus an implementation assembly). In cases where we were unable to sync / restore (packages were unavailable or other restore problems), we've opted those projects out of this process by adding "KeepAllProjectReferences" or "KeepProjectReference" (added to a Project Reference's metadata) to the test project.
- **Where are the generated project.json files?** Generated project.json files get created under "[ProjectDir]bin/obj/generated".

View File

@@ -1,9 +1,7 @@
Code Coverage
=============
"Code coverage" is a measure that indicates how much of our library code is exercised by our test suites. We measure code coverage using the [OpenCover](https://github.com/opencover/opencover), and a report of our latest code coverage results can be seen by clicking the coverage badge on the [CoreFX home page](https://github.com/dotnet/corefx):
[![Coverage status](https://img.shields.io/badge/coverage-report-blue.svg)](http://dotnet-ci.cloudapp.net/job/dotnet_corefx_coverage_windows/lastBuild/Code_Coverage_Report/)
"Code coverage" is a measure that indicates how much of our library code is exercised by our test suites. We measure code coverage using the [OpenCover](https://github.com/opencover/opencover), and a report of our latest code coverage results can be seen by clicking the coverage badge on the [CoreFX home page](https://github.com/dotnet/corefx), linking to the latest [Coverage Report](https://ci.dot.net/job/dotnet_corefx/job/master/job/code_coverage_windows/Code_Coverage_Report/).
This report shows each library currently being tested with code coverage and provides statistics around the quality of the code coverage for the library. It also provides a line-by-line breakdown of what lines are being covered and what lines are not.
@@ -30,6 +28,16 @@ An issue need not be addressed in its entirety. We happily accept contributions
Code coverage runs are performed by Jenkins approximately twice a day. The results of these runs are all available from the site linked to by the code coverage badge on the home page.
## PR Code Coverage Runs
Jenkins can create a coverage report using your PR. Ask for it using `@dotnet-bot test code coverage please`.
After it's done, the report can be found in the build log; it looks like, e.g.:
`https://ci.dot.net/job/dotnet_corefx/job/master/job/code_coverage_windows_prtest/16/artifact/bin/tests/coverage`
then add index.htm on the end:
`https://ci.dot.net/job/dotnet_corefx/job/master/job/code_coverage_windows_prtest/16/artifact/bin/tests/coverage/index.htm`
You can navigate to this from your PR by clicking the "Details" link to the right of the code coverage job listed at the bottom of the PR after having issued the above request to dotnet-bot. In the Jenkins UI for the resulting build, click the "Build Artifacts" link and navigate through the resulting hierarchy to the index.htm file.
## Local Code Coverage Runs
You can perform code coverage runs locally on your own machine. Normally to build your entire CoreFX repo, from the root of your repo you'd run:
@@ -50,11 +58,11 @@ You can also build and test with code coverage for a particular test project rat
msbuild /t:BuildAndTest
To do so with code coverage, as with ```build``` append the ```/p:Coverage=true``` argument:
To do so with code coverage, append the ```/p:Coverage=true``` argument:
msbuild /t:BuildAndTest /p:Coverage=true
The results for this one library will then also show up in the aforementioned index.htm file. For example, to build, test, and get code coverage results for the System.Diagnostics.Debug library, from the root of my repo I can do:
The results for this one library will then show up in the aforementioned index.htm file. For example, to build, test, and get code coverage results for the System.Diagnostics.Debug library, from the root of the repo one can do:
cd src\System.Diagnostics.Debug\tests\
msbuild /t:BuildAndTest /p:Coverage=true
@@ -63,22 +71,11 @@ And then once the run completes:
..\..\..\bin\tests\coverage\index.htm
## Code coverage with mscorlib code
## Code coverage with System.Private.CoreLib code
Some of the libraries for which contracts and tests live in the corefx repo are actually implemented in the core runtime library in another repo, e.g. the implementation that backs the System.Runtime contract is in System.Private.Corlib.dll in either the coreclr or corert repo. To run coverage reports for these projects, you need to build mscorlib locally from the coreclr repo.
Some of the libraries for which contracts and tests live in the corefx repo are actually fully or partially implemented in the core runtime library in another repo, e.g. the implementation that backs the System.Runtime contract is in System.Private.CoreLib.dll in either the coreclr or corert repo. To run coverage reports for these projects, you need to build System.Private.CoreLib locally from the coreclr repo. To get coverage of System.Private.CoreLib while running the tests for a particular library:
The following steps can be used manually to produce a coverage report, but a customizable batch file can be found [here](facade-code-coverage.bat). Changing the parameters in the first couple of lines lets you run a coverage report easily for any facade project.
1. Follow the steps outlined at [Testing with Private CoreClr Bits](https://github.com/dotnet/corefx/blob/master/Documentation/project-docs/developer-guide.md#testing-with-private-coreclr-bits). Make sure to include the optional steps listed as being required for code coverage.
2. Add /p:CodeCoverageAssemblies="System.Private.CoreLib" to the previously discussed msbuild command, e.g. msbuild /t:BuildAndTest /p:Coverage=true /p:CodeCoverageAssemblies="System.Private.CoreLib"
1. Build the local test project (`msbuild /T:Build`)
3. Build coreclr locally in Debug or Release (`build.cmd all Debug skiptests`)
2. Navigate to the built test directory in the corefx bin (e.g. `bin/tests/AnyOS.AnyCPU.Debug/System.Runtime/netcoreapp1.0` for `System.Runtime`
4. Delete `coreclr.dll`, `mscorlib.dll`, `mscorlib.ni.dll`, `System.Private.CoreLib.dll` and `System.Private.CoreLib.ni.dll` from that directory
5. Copy all files in the coreclr `bin` directory to the test directory
6. Copy all files in the coreclr `bin/PDB` directory to the test directory
7. Run an OpenCover command with `xunit.console.netcore.exe`. For example:
<corefx-root>/packages/OpenCover/<opencover-version>/tools/OpenCover.Console.exe -oldStyle -filter:"+[*]* -[*.Tests]* -[xunit.*]*" -excludebyfile:"*\Common\src\System\SR.*" -nodefaultfilters -excludebyattribute:*.ExcludeFromCodeCoverage* -skipautoprops -hideskipped:All -threshold:1 -returntargetcode -register:user -targetdir:<path-to corefx-bin> -target:CoreRun.exe -output:coverage.xml -targetargs:"xunit.console.netcore.exe System.Runtime.Tests -xml testResults.xml -notrait Benchmark=true -notrait category=OuterLoop -notrait category=failing -notrait category=nonwindowstests"
8. Run a ReportGenerator command with the generated `coverage.xml` file. For example:
<corefx-root>/packages/ReportGenerator/<opencover-version>/tools/ReportGenerator.exe -reporttypes:Html;Badges -reports:coverage.xml
The resulting code coverage report should now also include details for System.Private.CoreLib.

View File

@@ -103,3 +103,24 @@ prajwal@ubuntu ~/corefx $ ./scripts/arm32_ci_script.sh \
```
The Linux ARM Emulator is based on the soft floating point and thus the native binaries are generated for the armel architecture. The corefx binaries generated by the above command can be found at `~/corefx/bin/Linux.armel.Release`, `~/corefx/bin/Linux.AnyCPU.Release`, `~/corefx/bin/Unix.AnyCPU.Release`, and `~/corefx/bin/AnyOS.AnyCPU.Release`.
Build corefx for a new architecture
===================================
When building for a new architecture, you will need to build the native pieces separately from the managed pieces in order to correctly bootstrap the native runtime. Instead of calling build.sh directly, you should split the calls like so:
Example building for armel
```
build-native.sh -buildArch=armel
--> Output goes to bin/runtime/netcoreapp-Linux-Debug-armel
build-managed.sh -buildArch=x64
--> Output goes to bin/runtime/netcoreapp-Linux-Debug-x64
```
The reason you need to build the managed portion for x64 is because it depends on runtime packages for the new architecture which don't exist yet so we use another existing architecture such as x64 as a proxy for building the managed binaries.
Similarly, if you want to try to run tests, you will have to copy the managed assemblies from the proxy directory (i.e. `netcoreapp-Linux-Debug-x64`) to the new architecture directory (i.e. `netcoreapp-Linux-Debug-armel`) and run the code via another host such as corerun, because dotnet sits at a higher level and most likely doesn't exist for the new architecture yet.
Once all the necessary builds are set up and packages are published, the splitting of the build and manual creation of the runtime should no longer be necessary.

View File

@@ -1,6 +1,6 @@
# Running XUnit tests cross platform
Unlike Windows, where we run tests as part of the build, we have a seperate
Unlike Windows, where we run tests as part of the build, we have a separate
explicit testing step on Linux and OSX. Over time, this special step will go
away in favor of a similar "run tests during the build" model.
@@ -17,12 +17,12 @@ instructions assume you are building for Linux, but are easily modifiable for OS
From the root of your CoreCLR enlistment on Linux, run `./build.sh Release` in
order to build.
2. A corresponding version of mscorlib.dll. Depending on your platform, this may
2. A corresponding version of System.Private.Corelib.dll. Depending on your platform, this may
be produced when you run `build.sh`. Otherwise, this can be produced by
running `build.cmd linuxmscorlib Release` from a CoreCLR enlistment on
Windows. Remember that the runtime and mscorlib are tightly coupled with
running `build.cmd linuxmscorlib Release` (it's `mscorlib` for historical reasons) from a CoreCLR enlistment on
Windows. Remember that the runtime and System.Private.Corelib are tightly coupled with
respect to object sizes and layout so you need to ensure you have either a
release coreclr and release mscorlib or debug coreclr and debug mscorlib.
release coreclr and release System.Private.Corelib or debug coreclr and debug System.Private.Corelib.
3. A Linux build of CoreFX. We currently have experimental support for building
CoreFX on Linux via `build.sh`.
The other option is:
@@ -56,17 +56,15 @@ If needed, copy the packages folder:
# rsync -v -f ~/mnt/matell3/d/git/corefx/packages ~/git/corefx/packages
```
If needed, copy mscorlib:
If needed, copy System.Private.Corelib:
```
# rsync -v -r ~/mnt/matell3/d/git/coreclr/bin/Product/ ~/git/coreclr/bin/Product/
```
Then, run the tests. We need to pass an explict path to the location of CoreCLR
and mscorlib.dll.
Then, run the tests. We need to pass an explicit path to the location of CoreCLR.
```
# ./run-test.sh --coreclr-bins ~/git/coreclr/bin/Product/Linux.x64.Release \
--mscorlib-bins ~/git/coreclr/bin/Product/Linux.x64.Release \
# ./run-test.sh --coreclr-bins ~/git/coreclr/bin/Product/Linux.x64.Release
```
run-test.sh should now invoke all the managed tests.

View File

@@ -1,82 +0,0 @@
@echo off
:: Example settings for System.Runtime
SET project=System.Runtime
SET msbuildargs=/T:Build
SET testsubdir=AnyOS.AnyCPU.Debug
SET filter="+[*]* -[*.Tests]* -[*]System.Collections.* -[*]System.Diagnostics.* -[*]System.Globalization.* -[*]System.IO.* -[*]System.Reflection.* -[*]System.Resources.* -[*]System.Runtime.* -[*]System.Security.* -[*]System.StubHelpers.* -[*]System.Threading.* -[*]Microsoft.* -[*]Windows.* -[*]System.App* -[*]System.Text.Decoder* -[*]System.Text.Encoder* -[*]System.Text.*Encoding -[*]System.Text.Internal* -[xunit.*]*"
:: Update this when OpenCover or ReportGenerator are updated
SET opencoverversion=4.6.519
SET reportgeneratorversion=2.4.3
:: Assumes that the corefx and coreclr repo folders are in the same parent folder
SET root=C:\Users\Hugh\Documents\Github
SET corefx=%root%\corefx
SET coreclr=%root%\coreclr
SET packages=%corefx%\packages
SET opencover=%packages%\OpenCover\%opencoverversion%\tools\OpenCover.Console.exe
SET reportgenerator=%packages%\ReportGenerator\%reportgeneratorversion%\tools\ReportGenerator.exe
SET targetdir=%corefx%\bin\tests\%testsubdir%\%project%.Tests\netcoreapp1.0
SET resultsfile=testresults.xml
SET coveragefile=coverage.xml
SET coveragedir=coverage
SET originalfolder=%cd%
SET sourcefolder=%corefx%\src\%project%\tests
SET coreclrbuild=%coreclr%\bin\Product\Windows_NT.x64.Debug
SET coreclrbuild=%coreclr%\bin\Product\Windows_NT.x64.Release
:: Build the library
cd %sourcefolder%
msbuild %msbuildargs%
cd %originalfolder%
:: Delete old files (see #8381 for why)
del %targetdir%\mscorlib.dll
del %targetdir%\mscorlib.ni.dll
del %targetdir%\System.Private.CoreLib.dll
del %targetdir%\System.Private.CoreLib.ni.dll
del %targetdir%\coreclr.dll
del %targetdir%\CoreRun.exe
del %targetdir%\CoreConsole.exe
del %targetdir%\clretwrc.dll
del %targetdir%\clrjit.dll
del %targetdir%\dbgshim.dll
del %targetdir%\mscordaccore.dll
del %targetdir%\mscordbi.dll
del %targetdir%\mscorrc.debug.dll
del %targetdir%\mscorrc.dll
del %targetdir%\sos.dll
:: Copy over our local build files
For %%a in (
%coreclrbuild%\mscorlib.dll
%coreclrbuild%\PDB\mscorlib.pdb
%coreclrbuild%\System.Private.CoreLib.dll
%coreclrbuild%\PDB\System.Private.CoreLib.pdb
%coreclrbuild%\coreclr.dll
%coreclrbuild%\PDB\coreclr.pdb
%coreclrbuild%\CoreRun.exe
%coreclrbuild%\CoreConsole.exe
%coreclrbuild%\clretwrc.dll
%coreclrbuild%\clrjit.dll
%coreclrbuild%\dbgshim.dll
%coreclrbuild%\mscordaccore.dll
%coreclrbuild%\mscordbi.dll
%coreclrbuild%\mscorrc.debug.dll
%coreclrbuild%\mscorrc.dll
%coreclrbuild%\sos.dll
) do copy /b/v/y "%%~a" "%targetdir%\"
:: Now, run the actual tests and generate a coverage report
SET corerunargs=%targetdir%\xunit.console.netcore.exe %project%.Tests.dll -xml %resultsfile% -notrait category=OuterLoop -notrait category=failing -notrait category=nonwindowstests
%opencover% -oldStyle -filter:%filter% -excludebyfile:"*\Common\src\System\SR.*" -nodefaultfilters -excludebyattribute:*.ExcludeFromCodeCoverage* -skipautoprops -hideskipped:All -threshold:1 -returntargetcode -register:user -targetdir:%targetdir% -target:CoreRun.exe -output:%coveragefile% -targetargs:"%corerunargs%"
%reportgenerator% -targetdir:%coveragedir% -reporttypes:Html;Badges -reports:%coveragefile% -verbosity:Error

View File

@@ -0,0 +1,39 @@
# PInvoke Analyzer
During the build of any product library in CoreFX we use a Roslyn code analyzer to look for disallowed native calls (PInvokes). When there is a violation, it will fail the build. To fix the build, either find an alternative to the PInvoke or baseline the failure temporarily. To baseline it, add the function name in the format `module!entrypoint` to a file named PInvokeAnalyzerExceptionList.analyzerdata in the same folder as the project. [Here](https://github.com/dotnet/corefx/blob/master/src/System.Diagnostics.Process/src/PInvokeAnalyzerExceptionList.analyzerdata) is an example.
If you baseline a violation, please open an issue to fix it because the library likely cannot ship in this situation. It is better to not introduce the violation. We want to clean out any baselines. There are situations where a violation may be acceptable. One situation is where we are shipping the native implementation of the API. An example of this situation is `sni.dll` which is used by SqlClient.
Each project is analyzed against one of two possible lists we maintain.
## Legal UWP APIs
### Applies to
This applies to product libraries that are being built for use in a modern Windows app (aka UWP app, or app running on UAP). When building the `uapaot` or `uap` configurations we will apply this check. If the library does not have a `uap` or `uapaot` configuration explicitly listed in `Configuration.props` in the project folder, when targeting `uap` or `uapaot` we will build the `netstandard` configuration, and apply this check.
We do not currently apply this check to test binaries. Although when testing UWP libraries the tests must run within a test app, they do not need to pass through the store validation process. It is still possible they may call an API that does not work correctly within an app's security boundary and that call would have to be avoided.
### Motivation
Not all PInvokes are legal within a UWP app. An allow-list is enforced when the Windows store ingests an app, and also in a build step in the Visual Studio build process for apps. If we produce a library for UWP use, any PInvokes it performs must be to API's that are on the allow-list or the app using the library will fail validation.
### Implementation
To enforce this the analyzer consults the list [here](https://github.com/dotnet/buildtools/blob/master/src/Microsoft.DotNet.CodeAnalysis/PackageFiles/PinvokeAnalyzer_Win32UWPApis.txt).
The analyzer is enabled by default in the configurations below by the setting of the MSBuild property `UWPCompatible`. We aim to make all our `netstandard` compliant libraries work within a UWP app, but in rare cases where a library cannot, the check can be disabled with `<UWPCompatible>false</UWPCompatible>` in the project file.
There is also a more fine grained property `<EnablePInvokeUWPAnalyzer>false</EnablePInvokeUWPAnalyzer>` for temporary use.
## Legal OneCore APIs
### Applies to
This applies to all other product libraries in all other configurations targeted at Windows.
We do not currently apply this check to test binaries as they do not need to run on Windows Nano.
### Motivation
.NET Core supports execution on Windows Nano, which has a reduced API surface area known as OneCore. To run on Windows Nano we cannot invoke any platform API that is not available on OneCore.
### Implementation
To enforce this the analyzer consults the list [here](https://github.com/dotnet/buildtools/blob/master/src/Microsoft.DotNet.CodeAnalysis/PackageFiles/PinvokeAnalyzer_Win32Apis.txt).
The analyzer is enabled by default when building for Windows, not a test, and not building for UWP. We aim to make all such configurations OneCore compliant, but in the rare cases where a library cannot be, the check can be disabled with `<EnablePInvokeAnalyzer>false</EnablePInvokeAnalyzer>` in the project file.

View File

@@ -1,6 +1,6 @@
Building CoreFX on FreeBSD, Linux and OS X
==========================================
##Building
## Building
1. Install the prerequisites ([Linux](#user-content-linux), [macOS](#user-content-macos))
2. Clone the corefx repo `git clone https://github.com/dotnet/corefx.git`
@@ -72,7 +72,7 @@ but the headers are no longer available since that library version is out of sup
Some compilers get upset over new headers being in `/usr/local/include` with the old library being present at
`/usr/lib/libcrypto.dylib` (the tools have no issue with the versioned files, e.g. `/usr/lib/libcrypto.0.9.8.dylib`),
and so Homebrew does not allow the OpenSSL package to be installed into system default paths. A minimal installation
is presented here to facilitiate simplifying runtime requirements and compile-time requirements (for build systems using
is presented here to facilitate simplifying runtime requirements and compile-time requirements (for build systems using
CMake's `find_package`, like ours):
```sh
# We need to make the runtime libraries discoverable, as well as make

View File

@@ -34,7 +34,7 @@ In the case of packages, there is one small difference compared to the assembly
How does the Official Build workflow work
------------------------------------------
Our Official Builds are a little different than the regular dev flow, and that is because Official Builds need the ability to not only force a specific BuildNumberMinor, but also a BuildNumberMajor. The way they do it, is by passing in the parameter `OfficialBuildId` which specifies the SeedDate that should be used and the revision of the build. For example, the following invocation: `build.cmd -OfficialBuildId=20160523.99` will use May 23 2016 as the SeedDate to generate the version, and it will set '99' as the BuildNumberMinor. With this funcionality, our OfficialBuilds are able to have an orchestrator that triggers different builds and force all of them to have the same version.
Our Official Builds are a little different than the regular dev flow, and that is because Official Builds need the ability to not only force a specific BuildNumberMinor, but also a BuildNumberMajor. The way they do it, is by passing in the parameter `OfficialBuildId` which specifies the SeedDate that should be used and the revision of the build. For example, the following invocation: `build.cmd -OfficialBuildId=20160523.99` will use May 23 2016 as the SeedDate to generate the version, and it will set '99' as the BuildNumberMinor. With this functionality, our OfficialBuilds are able to have an orchestrator that triggers different builds and force all of them to have the same version.
Getting the version of a native binary in non-Windows platforms
========================================================

View File

@@ -5,16 +5,38 @@ You can build .NET Core either via the command line or by using Visual Studio.
## Required Software
Visual Studio 2015 is required.
Visual Studio must be installed. Supported versions:
* [Visual Studio 2015](https://www.visualstudio.com/vs/older-downloads/) (Community, Professional, Enterprise). The community version is completely free.
* [Visual Studio 2017](https://www.visualstudio.com/downloads/) (Community, Professional, Enterprise). The community version is completely free.
The following free downloads are compatible:
* [Visual Studio Community 2015](http://www.visualstudio.com/en-us/downloads/visual-studio-2015-downloads-vs)
For Visual Studio 2015:
* In order to build our C++ projects be sure to select "Programming Languages | Visual C++ | Common Tools for Visual C++ 2015" while installing VS 2015 (or modify your install to include it).
* We also require that [Visual Studio 2015 Update 1](https://www.visualstudio.com/en-us/news/vs2015-update1-vs.aspx) be installed.
Note: In order to build our C++ projects be sure to select "Programming Languages | Visual C++ | Common Tools for Visual C++ 2015" while installing VS 2015 (or modify your install to include it).
For Visual Studio 2017:
* When doing a 'Workloads' based install, the following are the minimum requirements:
* .NET Desktop Development
* All Required Components
* .NET Framework 4-4.6 Development Tools
* Desktop Development with C++
* All Required Components
* VC++ 2017 v141 Toolset (x86, x64)
* Windows 8.1 SDK and UCRT SDK
* VC++ 2015.3 v140 Toolset (x86, x64)
* When doing an 'Individual Components' based install, the following are the minimum requirements:
* C# and Visual Basic Roslyn Compilers
* Static Analysis Tools
* .NET Portable Library Targeting Pack
* Windows 10 SDK or Windows 8.1 SDK
* Visual Studio C++ Core Features
* VC++ 2017 v141 Toolset (x86, x64)
* MSBuild
* .NET Framework 4.6 Targeting Pack
* Windows Universal CRT SDK
* VC++ 2015.3 v140 Toolset (x86, x64)
* Ensure you are running from the "Developer Command Prompt for VS2017"; otherwise, the build will attempt to locate and use the VS2015 toolset.
We also require that [Visual Studio 2015 Update 1](https://www.visualstudio.com/en-us/news/vs2015-update1-vs.aspx) be installed.
[CMake](https://cmake.org/) is required to build the native libraries for Windows. To build these libraries cmake must be installed from [the CMake download page](https://cmake.org/download/) and added to your path.
[CMake](https://cmake.org/) is required to build the native libraries for Windows. To build these libraries cmake must be installed from [the CMake download page](https://cmake.org/download/#latest) and added to your path.
## Building From the Command Line
@@ -35,6 +57,11 @@ Only use it when the parameters that you are passing to the script apply for bot
For more information about the different options when building, run `build.cmd -?` and look at examples in the [developer-guide](../project-docs/developer-guide.md).
### Running tests from the command line
From the root, use `build-tests.cmd`.
For more details, or to test an individual project, see the [developer guide topic](https://github.com/dotnet/corefx/blob/master/Documentation/project-docs/developer-guide.md).
### Running tests from Visual Studio
1. Open solution of interest

View File

@@ -3,18 +3,18 @@ Recommended reading to better understand this document:
| [Project-Guidelines](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/project-guidelines.md)
| [Package-Projects](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/package-projects.md)
#Add APIs
# Add APIs
- [Determining versions and targets](#determining-versions-and-targets)
- [Making the changes in repo](#making-the-changes-in-repo)
- [FAQ](#faq)
##Determining versions and targets
## Determining versions and targets
1. [Determine what library](#determine-what-library) the API goes into.
2. [Determine the target framework](#determine-target-framework) for the library that will contain the API.
3. [Determine the version](#determine-library-version) for the library that will contain the API.
###Determine what library
### Determine what library
- Propose a library for exposing it as part of the [API review process](http://aka.ms/apireview).
- Keep in mind the API might be exposed in a reference assembly that
doesn't match the identity of the implementation. There are many reasons for this but
@@ -22,7 +22,7 @@ the primary reason is to abstract the runtime assembly identities across
different platforms while sharing a common API surface and allowing us to refactor
the implementation without compat concerns in future releases.
###Determine target framework
### Determine target framework
`<latest>` is the target framework version currently under development. Currently netcoreapp1.1 and netstandard1.7 (note netstandard1.7 is a placeholder for netstandard2.0).
- If the library is [part of netstandard](#isnetstandard)
@@ -39,15 +39,15 @@ the implementation without compat concerns in future releases.
- If the library is not part of netstandard and not building against netstandard
- All the rules for a library that is not part of netstandard apply but instead of having a target framework of
`netstandard<latest>` it will have a target framework of `<framework><latest>`. Example `net<latest>` for desktop or `netcoreapp<latest>` for .NET Core.
- It is not uncommon for a library to target the latest versions of multipe frameworks when adding new APIs (ex: https://github.com/dotnet/corefx/blob/master/src/System.Runtime/ref/System.Runtime.builds)
- It is not uncommon for a library to target the latest versions of multiple frameworks when adding new APIs (ex: https://github.com/dotnet/corefx/blob/master/src/System.Runtime/ref/System.Runtime.builds)
###Determine library version
### Determine library version
- If targeting netstandard
- Ensure minor version of the assembly is bumped since last stable package release
- If targeting netcoreapp
- No assembly version bump necessary
##Making the changes in repo
## Making the changes in repo
**If changing the library version**
- Update the `AssemblyVersion` property in `<Library>\dir.props` (ex: [System.Runtime\dir.props](https://github.com/dotnet/corefx/blob/master/src/System.Runtime/dir.props#L4)) to the version determined above.
@@ -95,7 +95,7 @@ the implementation without compat concerns in future releases.
- Add new test code following [conventions](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/project-guidelines.md#code-file-naming-conventions) for new files that are specific to the new target framework.
- To run just the new test configuration run `msbuild <Library>.csproj /t:RebuildAndTest /p:TargetGroup=<TargetGroup>`
##FAQ
## FAQ
_**<a name="isnetstandard">Is your API part of netstandard?</a>**_
- For netstandard2.0 refer to https://github.com/dotnet/standard/tree/master/netstandard/ref, and if any Type is included in any of the *.cs files then it is part of netstandard.
@@ -133,8 +133,8 @@ If you are moving types down you need to version both contracts at the same time
project references across the projects. You also need to be sure to leave type-forwards in the places
where you removed types in order to maintain back-compat.
###Potential clean-up work that can be done
### Potential clean-up work that can be done
- Remove old build configurations - The older build configurations will automatically be harvested from
the last stable packages and thus can be removed.
- Remove import statements - If not referencing any pre-netstandard stable packages the [imports of dotnet5.x](https://github.com/dotnet/corefx/blob/master/src/System.Diagnostics.Process/src/project.json#L28) are no longer needed and can be removed. We should also remove any dead target frameworks sections.
- Remove all non-conditionsed `<AssemblyVersion>` properties from csproj's as it should be defined in library\dir.props.
- Remove all non-conditioned `<AssemblyVersion>` properties from csproj's as it should be defined in library\dir.props.

View File

@@ -5,39 +5,24 @@ Interop Guidelines
We have the following goals related to interop code being used in CoreFX:
- Minimize code duplication for interop.
- We should only define a given interop signature in a single place.
This stuff is tricky, and we shouldn't be copy-and-pasting it.
- We should only define a given interop signature in a single place. This stuff is tricky, and we shouldn't be copy-and-pasting it.
- Minimize unnecessary IL in assemblies.
- Interop signatures should only be compiled into the assemblies that
actually consume them. Having extra signatures bloats assemblies and
makes it more difficult to do static analysis over assemblies to
understand what they actually use. It also leads to problems when such
static verification is used as a gate, e.g. if a store verifies that
only certain APIs are used by apps in the store.
- Interop signatures should only be compiled into the assemblies that actually consume them. Having extra signatures bloats assemblies and makes it more difficult to do static analysis over assemblies to understand what they actually use. It also leads to problems when such static verification is used as a gate, e.g. if a store verifies that only certain APIs are used by apps in the store.
- Keep interop code isolated and consolidated.
- This is both for good hygiene and to help keep platform-specific code
separated from platform-neutral code, which is important for maximizing
reusable code above PAL layers.
- Ensure maximal managed code reuse across different OS flavors which have
the same API but not the same ABI.
- This is the case for UNIX and addressing it is a work-in-progress (see issue
#2137 and section on "shims" below.)
- This is both for good hygiene and to help keep platform-specific code separated from platform-neutral code, which is important for maximizing reusable code above PAL layers.
- Ensure maximal managed code reuse across different OS flavors which have the same API but not the same ABI.
- This is the case for UNIX and addressing it is a work-in-progress (see issue #2137 and section on "shims" below.)
## Approach
### Interop type
- All code related to interop signatures (DllImports, interop structs
used in DllImports, constants that map to native values, etc.) should
live in a partial, static, and internal “Interop” class in the root
namespace, e.g.
- All code related to interop signatures (DllImports, interop structs used in DllImports, constants that map to native values, etc.) should live in a partial, static, and internal “Interop” class in the root namespace, e.g.
```C#
internal static partial class Interop { ... }
```
- Declarations shouldn't be in Interop directly, but rather within a
partial, static, internal nested type named for a given library or set
of libraries, e.g.
- Declarations shouldn't be in Interop directly, but rather within a partial, static, internal nested type named for a given library or set of libraries, e.g.
```C#
internal static partial class Interop
@@ -50,31 +35,14 @@ internal static partial class Interop
internal static partial class mincore { ... }
}
```
- With few exceptions, the only methods that should be defined in these
interop types are DllImports.
- Exceptions are limited to times when most or every consumer of a
particular DllImport will need to wrap its invocation in a helper, e.g.
to provide additional marshaling support, to hide thread-safety issues
in the underlying OS implementation, to do any required manipulation of
safe handles, etc. In such cases, the DllImport should be private
whenever possible rather than internal, with the helper code exposed to
consumers rather than having the DllImport exposed directly.
- With few exceptions, the only methods that should be defined in these interop types are DllImports.
- Exceptions are limited to times when most or every consumer of a particular DllImport will need to wrap its invocation in a helper, e.g. to provide additional marshaling support, to hide thread-safety issues in the underlying OS implementation, to do any required manipulation of safe handles, etc. In such cases, the DllImport should be private whenever possible rather than internal, with the helper code exposed to consumers rather than having the DllImport exposed directly.
### File organization
- The Interop partial class definitions should live in Interop.*.cs
files. These Interop.*.cs files should all live under Common rather than
within a given assembly's folder.
- The only exception to this should be when an assembly P/Invokes to its
own native library that isn't available to or consumed by anyone else,
e.g. System.IO.Compression P/Invoking to clrcompression.dll. In such
cases, System.IO.Compression should have its own Interop folder which
follows a similar scheme as outlined in this proposal, but just for
these private P/Invokes.
- Under Common\src\Interop, we'll have a folder for each target
platform, and within each platform, for each library from which
functionality is being consumed. The Interop.*.cs files will live within
those library folders, e.g.
- The Interop partial class definitions should live in Interop.*.cs files. These Interop.*.cs files should all live under Common rather than within a given assembly's folder.
- The only exception to this should be when an assembly P/Invokes to its own native library that isn't available to or consumed by anyone else, e.g. System.IO.Compression P/Invoking to clrcompression.dll. In such cases, System.IO.Compression should have its own Interop folder which follows a similar scheme as outlined in this proposal, but just for these private P/Invokes.
- Under Common\src\Interop, we'll have a folder for each target platform, and within each platform, for each library from which functionality is being consumed. The Interop.*.cs files will live within those library folders, e.g.
```
\Common\src\Interop
@@ -91,15 +59,9 @@ internal static partial class Interop
As shown above, platforms may be additive, in that an assembly may use functionality from multiple folders, e.g. System.IO.FileSystem's Linux build will use functionality both from Unix (common across all Unix systems) and from Linux (specific to Linux and not available across non-Linux Unix systems).
 
- Interop.*.cs files are created in a way such that every assembly
consuming the file will need every DllImport it contains.
- If multiple related DllImports will all be needed by every consumer,
they may be declared in the same file, named for the functionality
grouping, e.g. Interop.IOErrors.cs.
- Otherwise, in the limit (and the expected case for most situations)
each Interop.*.cs file will contain a single DllImport and associated
interop types (e.g. the structs used with that signature) and helper
wrappers, e.g. Interop.strerror.cs.
- Interop.*.cs files are created in a way such that every assembly consuming the file will need every DllImport it contains.
- If multiple related DllImports will all be needed by every consumer, they may be declared in the same file, named for the functionality grouping, e.g. Interop.IOErrors.cs.
- Otherwise, in the limit (and the expected case for most situations) each Interop.*.cs file will contain a single DllImport and associated interop types (e.g. the structs used with that signature) and helper wrappers, e.g. Interop.strerror.cs.
```
\Common\src\Interop
@@ -111,14 +73,8 @@ As shown above, platforms may be additive, in that an assembly may use functiona
\Interop.OutputDebugString.cs
```
- If structs/constants will be used on their own without an associated
DllImport, or if they may be used with multiple DllImports not in the
same file, they should be declared in a separate file.
- In the case of multiple overloads of the same DllImport (e.g. some
overloads taking a SafeHandle and others taking an IntPtr, or overloads
taking different kinds of SafeHandles), if they can't all be declared in
the same file (because they won't all be consumed by all consumers), the
file should be qualified with the key differentiator, e.g.
- If structs/constants will be used on their own without an associated DllImport, or if they may be used with multiple DllImports not in the same file, they should be declared in a separate file.
- In the case of multiple overloads of the same DllImport (e.g. some overloads taking a SafeHandle and others taking an IntPtr, or overloads taking different kinds of SafeHandles), if they can't all be declared in the same file (because they won't all be consumed by all consumers), the file should be qualified with the key differentiator, e.g.
```
\Common\src\Interop
@@ -128,11 +84,7 @@ As shown above, platforms may be additive, in that an assembly may use functiona
\Interop.DuplicateHandle_IntPtr.cs
```
- The library names used per-platform are stored in internal constants
in the Interop class in a private Libraries class in a per-platform file
named Interop.Libraries.cs. These constants are then used for all
DllImports to that library, rather than having the string duplicated
each time, e.g.
- The library names used per-platform are stored in internal constants in the Interop class in a private Libraries class in a per-platform file named Interop.Libraries.cs. These constants are then used for all DllImports to that library, rather than having the string duplicated each time, e.g.
```C#
internal static partial class Interop // contents of Common\src\Interop\Windows\Interop.Libraries.cs
@@ -151,9 +103,7 @@ internal static partial class Interop // contents of Common\src\Interop\Windows\
}
```
(Note that this will likely result in some extra constants defined in
each assembly that uses interop, which minimally violates one of the
goals, but it's very minimal.)
(Note that this will likely result in some extra constants defined in each assembly that uses interop, which minimally violates one of the goals, but it's very minimal.)
 
- .csproj project files then include the interop code they need, e.g.
```XML
@@ -170,40 +120,26 @@ goals, but it's very minimal.)
```
### Build System
When building CoreFx, we use the "OSGroup" property to control what
target platform we are building for. The valid values for this property
are Windows_NT (which is the default value from MSBuild when running on
Windows), Linux and OSX.
When building CoreFx, we use the "OSGroup" property to control what target platform we are building for. The valid values for this property are Windows_NT (which is the default value from MSBuild when running on Windows), Linux and OSX.
The build system sets a few MSBuild properties, depending on the OSGroup
setting:
The build system sets a few MSBuild properties, depending on the OSGroup setting:
* TargetsWindows
* TargetsLinux
* TargetsOSX
* TargetsUnix
TargetsUnix is true for both OSX and Linux builds and can be used to
include code that can be used on both Linux and OSX (e.g. it is written
against a POSIX API that is present on both platforms).
TargetsUnix is true for both OSX and Linux builds and can be used to include code that can be used on both Linux and OSX (e.g. it is written against a POSIX API that is present on both platforms).
You should not test the value of the OSGroup property directly, instead
use one of the values above.
You should not test the value of the OSGroup property directly, instead use one of the values above.
#### Project Files
Whenever possible, a single .csproj should be used per assembly,
spanning all target platforms, e.g. System.Console.csproj includes
conditional entries for when targeting Windows vs when targeting Linux.
A property can be passed to msbuild to control which flavor is built,
e.g. msbuild /p:OSGroup=OSX System.Console.csproj.
Whenever possible, a single .csproj should be used per assembly, spanning all target platforms, e.g. System.Console.csproj includes conditional entries for when targeting Windows vs when targeting Linux. A property can be passed to msbuild to control which flavor is built, e.g. msbuild /p:OSGroup=OSX System.Console.csproj.
### Constants
- Wherever possible, constants should be defined as "const". Only if the
data type doesn't support this (e.g. IntPtr) should they instead be
static readonly fields.
- Wherever possible, constants should be defined as "const". Only if the data type doesn't support this (e.g. IntPtr) should they instead be static readonly fields.
- Related constants should be grouped under a partial, static, internal
type, e.g. for error codes they'd be grouped under an Errors type:
- Related constants should be grouped under a partial, static, internal type, e.g. for error codes they'd be grouped under an Errors type:
```C#
internal static partial class Interop
@@ -229,72 +165,53 @@ internal static partial class Interop
}
```
Using enums instead of partial, static classes can lead to needing lots
of casts at call sites and can cause problems if such a type needs to be
split across multiple files (enums can't currently be partial). However,
enums can be valuable in making it clear in a DllImport signature what
values are permissible. Enums may be used in limited circumstances where
these aren't concerns: the full set of values can be represented in the
enum, and the interop signature can be defined to use the enum type
rather than the underlying integral type.
Using enums instead of partial, static classes can lead to needing lots of casts at call sites and can cause problems if such a type needs to be split across multiple files (enums can't currently be partial). However, enums can be valuable in making it clear in a DllImport signature what values are permissible. Enums may be used in limited circumstances where these aren't concerns: the full set of values can be represented in the enum, and the interop signature can be defined to use the enum type rather than the underlying integral type.
## Naming
### Naming
- Interop signatures / structs / constants should be defined using the
same name / capitalization / etc. that's used in the corresponding
native code.
- We should not rename any of these based on managed coding guidelines.
The only exception to this is for the constant grouping type, which
should be named with the most discoverable name possible; if that name
is a concept (e.g. Errors), it can be named using managed naming
guidelines.
Interop signatures / structs / constants should be defined using the same name / capitalization / etc. that's used in the corresponding native code.
- We should not rename any of these based on managed coding guidelines. The only exception to this is for the constant grouping type, which should be named with the most discoverable name possible; if that name is a concept (e.g. Errors), it can be named using managed naming guidelines.
### Definition
When defining the P/Invoke signatures and structs, the following guidelines should be followed. More details on P/Invoke behavior and these guidelines can be found here: [P/Invokes](interop-pinvokes)
- Interop signatures / structs / constants should be defined using the same name / capitalization / etc. that's used in the corresponding native code.
- Avoid using `StringBuilder`, particularly as an output buffer to avoid over allocating.
- Use blittable types in structs where possible (not `string` and `bool`).
- Use `sizeof()` for blittable structs, not `Marshal.SizeOf<MyStruct>()`
- Use C# type keywords that map as closely to the underlying type as possible (e.g. use `uint` when the native type is unsigned, not `int` or `System.UInt32`).
- Use `ArrayPool` for buffer pooling.
- Be careful of return string termination when allocating buffers (add room for null where needed).
- Only use `bool` for 32 bit types (matches `BOOL` not `BOOLEAN`).
- Use `[In]` and `[Out]` only when they differ from the implicit behavior.
- Explicitly specify the `CharSet` as `Ansi` or `Unicode` when the signature has a string.
- Use `ExactSpelling` to avoid probing for A/W signature variants.
- Do not set `PreserveSig` to false.
## UNIX shims
Often, various UNIX flavors offer the same API from the point-of-view of compatibility
with C/C++ source code, but they do not have the same ABI. e.g. Fields can be laid out
differently, constants can have different numeric values, exports can
be named differently, etc. There are not only differences between operating systems
(Mac OS X vs. Ubuntu vs. FreeBSD), but also differences related to the underlying
processor architecture (x64 vs. x86 vs. ARM).
Often, various UNIX flavors offer the same API from the point-of-view of compatibility with C/C++ source code, but they do not have the same ABI. e.g. Fields can be laid out differently, constants can have different numeric values, exports can be named differently, etc. There are not only differences between operating systems (Mac OS X vs. Ubuntu vs. FreeBSD), but also differences related to the underlying processor architecture (x64 vs. x86 vs. ARM).
This leaves us with a situation where we can't write portable P/Invoke declarations
that will work on all flavors, and writing separate declarations per flavor is quite
fragile and won't scale.
This leaves us with a situation where we can't write portable P/Invoke declarations that will work on all flavors, and writing separate declarations per flavor is quite fragile and won't scale.
To address this, we're moving to a model where all UNIX interop from corefx starts with
a P/Invoke to a C++ lib written specifically for corefx. These libs -- System.*.Native.so
(aka "shims") -- are intended to be very thin layers over underlying platform libraries.
Generally, they are not there to add any significant abstraction, but to create a
stable ABI such that the same IL assembly can work across UNIX flavors.
To address this, we're moving to a model where all UNIX interop from corefx starts with a P/Invoke to a C++ lib written specifically for corefx. These libs -- System.*.Native.so (aka "shims") -- are intended to be very thin layers over underlying platform libraries. Generally, they are not there to add any significant abstraction, but to create a stable ABI such that the same IL assembly can work across UNIX flavors.
Guidelines for shim C++ API:
- Keep them as "thin"/1:1 as possible.
- We want to write the majority of code in C#.
- Never skip the shim and P/Invoke directly to the underlying platform API. It's
easy to assume something is safe/guaranteed when it isn't.
- Don't cheat and take advantage of coincidental agreement between
one flavor's ABI and the shim's ABI.
- Never skip the shim and P/Invoke directly to the underlying platform API. It's easy to assume something is safe/guaranteed when it isn't.
- Don't cheat and take advantage of coincidental agreement between one flavor's ABI and the shim's ABI.
- Use PascalCase in a style closer to Win32 than libc.
- If an export point has a 1:1 correspondence to the platform API, then name
it after the platform API in PascalCase (e.g. stat -> Stat, fstat -> FStat).
- If an export is not 1:1, then spell things out as we typically would in
CoreFX code (i.e. don't use abbreviations unless they come from the underlying
API.
- At first, it seemed that we'd want to use 1:1 names throughout, but it
turns out there are many cases where being strictly 1:1 isn't practical.
- In order to reduce the chance of collisions when linking with CoreRT, all
exports should have a prefix that corresponds to the Libraries' name, e.g.
"SystemNative_" or "CryptoNative_" to make the method name more unique.
See https://github.com/dotnet/corefx/issues/4818.
- If an export point has a 1:1 correspondence to the platform API, then name it after the platform API in PascalCase (e.g. stat -> Stat, fstat -> FStat).
- If an export is not 1:1, then spell things out as we typically would in CoreFX code (i.e. don't use abbreviations unless they come from the underlying API.)
- At first, it seemed that we'd want to use 1:1 names throughout, but it turns out there are many cases where being strictly 1:1 isn't practical.
- In order to reduce the chance of collisions when linking with CoreRT, all exports should have a prefix that corresponds to the Libraries' name, e.g. "SystemNative_" or "CryptoNative_" to make the method name more unique. See https://github.com/dotnet/corefx/issues/4818.
- Stick to data types which are guaranteed not to vary in size across flavors.
- Use int32_t, int64_t, etc. from stdint.h and not int, long, etc.
- Use char* for ASCII or UTF-8 strings and uint8_t* for byte buffers.
- Note that sizeof(char) == 1 is guaranteed.
- Do not use size_t in shim API. Always pick a fixed size. Often, it is most
convenient to line up with the managed int as int32_t (e.g. scratch buffer
size for read/write), but sometimes we need to handle huge sizes (e.g.
memory mapped files) and therefore use uint64_t.
- Do not use size_t in shim API. Always pick a fixed size. Often, it is most convenient to line up with the managed int as int32_t (e.g. scratch buffer size for read/write), but sometimes we need to handle huge sizes (e.g. memory mapped files) and therefore use uint64_t.
- Use int64_t for native off_t values.

View File

@@ -0,0 +1,228 @@
P/Invokes
=========
This document extends the [Interop Guidelines](interop-guidelines.md) to provide more specific guidelines, notes, and resources for defining P/Invokes.
Attributes
----------
**Implicit attributes applied to parameter and return values:**
| | Implicit Attribute |
|------------------|--------------------|
| parameter | `[In]` |
| `out` parameter | `[Out]` |
| `ref` parameter | `[In],[Out]` |
| return value | `[Out]` |
**`[DllImport()]` [1] attribute settings:**
| Setting | Recommendation | Details |
|---------|----------------|---------|
| [`PreserveSig`][2] | keep default | When this is explicitly set to false (the default is true), failed HRESULT return values will be turned into Exceptions (and the return value in the definition becomes null as a result).|
| [`SetLastError`][3] | as per API | Set this to true (default is false) if the API uses GetLastError and use Marshal.GetLastWin32Error to get the value. If the API sets a condition that says it has an error, get the error before making other calls to avoid inadvertently having it overwritten.|
| [`ExactSpelling`][4] | `true` | Set this to true (default is false) and gain a slight perf benefit as the framework will avoid looking for an "A" or "W" version. (See NDirectMethodDesc::FindEntryPoint).|
| [`CharSet`][5] | Explicitly use `CharSet.Unicode` or `CharSet.Ansi` when strings are present in the definition | This specifies marshalling behavior of strings and what `ExactSpelling` does when `false`. Be explicit with this one as the documented default is `CharSet.Ansi`. Note that `CharSet.Ansi` is actually UTF8 on Unix (`CharSet.Utf8` is coming). _Most_ of the time Windows uses Unicode while Unix uses UTF8. |
[1]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.dllimportattribute.aspx "MSDN"
[2]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.dllimportattribute.preservesig.aspx "MSDN"
[3]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.dllimportattribute.setlasterror.aspx "MSDN"
[4]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.dllimportattribute.exactspelling.aspx "MSDN"
[5]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.dllimportattribute.charset.aspx "MSDN"
Strings
-------
When the CharSet is Unicode or the argument is explicitly marked as `[MarshalAs(UnmanagedType.LPWSTR)]` _and_ the string is passed by value (not `ref` or `out`) the string will be pinned and used directly by native code (rather than copied).
Remember to mark the `[DllImport]` as `CharSet.Unicode` unless you explicitly want ANSI treatment of your strings.
**[AVOID]** `StringBuilder` marshalling *always* creates a native buffer copy (see `ILWSTRBufferMarshaler`). As such it can be extremely inefficient. Take the typical
scenario of calling a Windows API that takes a string:
1. Create a SB of the desired capacity (allocates managed capacity) **{1}**
2. Invoke
1. Allocates a native buffer **{2}**
2. Copies the contents if `[In]` _(the default for a `StringBuilder` parameter)_
3. Copies the native buffer into a newly allocated managed array if `[Out]` **{3}** _(also the default for `StringBuilder`)_
3. `ToString()` allocates yet another managed array **{4}**
That is *{4}* allocations to get a string out of native code. The best you can do to limit this is to reuse the `StringBuilder`
in another call but this still only saves *1* allocation. It is much better to use and cache a character buffer from `ArrayPool`- you can then get down to just the allocation for the `ToString()` on subsequent calls.
The other issue with `StringBuilder` is that it always copies the return buffer back up to the first null. If the passed back string isn't terminated or is a double-null-terminated string your P/Invoke is incorrect at best.
If you *do* use `StringBuilder` one last gotcha is that the capacity does **not** include a hidden null which is always accounted for in interop. It is pretty common for people to get this wrong as most APIs want the size of the buffer *including* the null. This can result in wasted/unnecessary allocations.
**[USE]** Char arrays from `ArrayPool` or `StringBuffer`.
[Default Marshalling for Strings](https://msdn.microsoft.com/en-us/library/s9ts558h.aspx "MSDN")
> ### Windows Specific
> For `[Out]` strings the CLR will use `CoTaskMemFree` by default to free strings or `SysStringFree` for strings that are marked
as `UnmanagedType.BSTR`.
> **For most APIs with an output string buffer:**
> The passed in character count must include the null. If the returned value is less than the passed in character count the call has succeeded and the value is the number of characters *without* the trailing null. Otherwise the count is the required size of the buffer *including* the null character.
> - Pass in 5, get 4: The string is 4 characters long with a trailing null.
> - Pass in 5, get 6: The string is 5 characters long, need a 6 character buffer to hold the null.
> [Windows Data Types for Strings](http://msdn.microsoft.com/en-us/library/dd374131.aspx "MSDN")
Booleans
--------
Booleans are easy to mess up. The default marshalling for P/Invoke is as the Windows type `BOOL`, where it is a 4 byte value. `BOOLEAN`, however, is a *single* byte. This can lead to hard to track down bugs as half the return value will be discarded, which will only *potentially* change the result. For `BOOLEAN` attributing `bool` with either `[MarshalAs(UnmanagedType.U1)]` or `[MarshalAs(UnmanagedType.I1)]` will work as `TRUE` is defined as `1` and `FALSE` is defined as `0`. `U1` is technically more correct as `BOOLEAN` is defined as an `unsigned char`.
`bool` is not a blittable type (see blitting below). As such, when defining structs it is recommended to use `Interop.BOOL.cs` for `BOOL` to get the best performance.
[Default Marshalling for Boolean Types](https://msdn.microsoft.com/en-us/library/t2t3725f.aspx "MSDN")
Guids
-----
Guids are usable directly in signatures. When passed by ref they can either be passed by `ref` or with the `[MarshalAs(UnmanagedType.LPStruct)]` attribute.
| Guid | By ref Guid |
|------|-------------|
| `KNOWNFOLDERID` | `REFKNOWNFOLDERID` |
`[MarshalAs(UnmanagedType.LPStruct)]` should _only_ be used for by ref Guids.
Common Data Types
-----------------
|Windows | C | C# | Alternative |
|---------------|-------------------|-------|-------------|
|`BOOL` |`int` |`int` |`bool`
|`BOOLEAN` |`unsigned char` |`byte` |`[MarshalAs(UnmanagedType.U1)] bool`
|`BYTE` |`unsigned char` |`byte` | |
|`CHAR` |`char` |`sbyte` | |
|`UCHAR` |`unsigned char` |`byte` | |
|`SHORT` |`short` |`short` | |
|`CSHORT` |`short` |`short` | |
|`USHORT` |`unsigned short` |`ushort` | |
|`WORD` |`unsigned short` |`ushort` | |
|`ATOM` |`unsigned short` |`ushort` | |
|`INT` |`int` |`int` | |
|`LONG` |`long` |`int` | |
|`ULONG` |`unsigned long` |`uint` | |
|`DWORD` |`unsigned long` |`uint` | |
|`LARGE_INTEGER` |`__int64` |`long` | |
|`LONGLONG` |`__int64` |`long` | |
|`ULONGLONG` |`unsigned __int64` |`ulong` | |
|`ULARGE_INTEGER` |`unsigned __int64` |`ulong` | |
|`UCHAR` |`unsigned char` |`byte` | |
|`HRESULT` |`long` |`int` | |
| Signed Pointer Types (`IntPtr`) | Unsigned Pointer Types (`UIntPtr`) |
|----------------------------------|-------------------------------------|
| `HANDLE` | `WPARAM` |
| `HWND` | `UINT_PTR` |
| `HINSTANCE` | `ULONG_PTR` |
| `LPARAM` | `SIZE_T` |
| `LRESULT` | |
| `LONG_PTR` | |
| `INT_PTR` | |
[Windows Data Types](http://msdn.microsoft.com/en-us/library/aa383751.aspx "MSDN")
[Data Type Ranges](http://msdn.microsoft.com/en-us/library/s3f49ktz.aspx "MSDN")
Blittable Types
---------------
Blittable types are types that have the same representation for native code. As such they do not need to be converted to another format to be marshalled to and from native code, and as this improves performance they should be preferred.
**Blittable types:**
- `byte`, `sbyte`, `short`, `ushort`, `int`, `uint`, `long`, `ulong`, `float`, `double`
- non-nested one dimensional arrays of blittable types (e.g. `int[]`)
- structs and classes with fixed layout that only have blittable types for instance fields
- fixed layout requires `[StructLayout(LayoutKind.Sequential)]` or `[StructLayout(LayoutKind.Explicit)]`
- structs are `LayoutKind.Sequential` by default, classes are `LayoutKind.Auto`
**NOT blittable:**
- `bool`
**SOMETIMES blittable:**
- `char`, `string`
When blittable types are passed by reference they are simply pinned by the marshaller instead of being copied to an intermediate buffer. (Classes are inherently passed by reference, structs are passed by reference when used with `ref` or `out`.)
`char` is blittable in a one dimensional array **or** if it is part of a type that is explicitly marked with `[StructLayout]` with `CharSet = CharSet.Unicode`.
```C#
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
public struct UnicodeCharStruct
{
public char c;
}
```
`string` is blittable if it isn't contained in another type and it's being passed as an argument that is marked with `[MarshalAs(UnmanagedType.LPWStr)]` or the `[DllImport]` has `CharSet = CharSet.Unicode` set.
You can see if a type is blittable by attempting to create a pinned `GCHandle`. If the type is not a string or considered blittable `GCHandle.Alloc` will throw an `ArgumentException`.
[Blittable and Non-Blittable Types](https://msdn.microsoft.com/en-us/library/75dwhxf7.aspx "MSDN")
[Default Marshalling for Value Types](https://msdn.microsoft.com/en-us/library/0t2cwe11.aspx "MSDN")
Keeping Managed Objects Alive
-----------------------------
`GC.KeepAlive()` will ensure an object stays in scope until the KeepAlive method is hit.
[`HandleRef`][6] allows the marshaller to keep an object alive for the duration of a P/Invoke. It can be used instead of `IntPtr` in method signatures. `SafeHandle` effectively replaces this class and should be used instead.
[6]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.handleref.aspx "MSDN"
[`GCHandle`][7] allows pinning a managed object and getting the native pointer to it. Basic pattern is:
``` C#
GCHandle handle = GCHandle.Alloc(obj, GCHandleType.Pinned);
IntPtr ptr = handle.AddrOfPinnedObject();
handle.Free();
```
[7]: https://msdn.microsoft.com/en-us/library/system.runtime.interopservices.gchandle.aspx "MSDN"
Pinning is not the default for `GCHandle`. The other major pattern is for passing a reference to a managed object through native code back to managed code (via a callback, typically). Here is the pattern:
``` C#
GCHandle handle = GCHandle.Alloc(obj);
SomeNativeEnumerator(callbackDelegate, GCHandle.ToIntPtr(handle));
// In the callback
GCHandle handle = GCHandle.FromIntPtr(param);
object managedObject = handle.Target;
// After the last callback
handle.Free();
```
Don't forget that `GCHandle` needs to be explicitly freed to avoid memory leaks.
Structs
-------
Managed structs are created on the stack and aren't removed until the method returns. By definition then, they are "pinned" (it won't get moved by the GC). You can also simply take the address in unsafe code blocks if native code won't use the pointer past the end of the current method.
Blittable structs are much more performant as they can simply be used directly by the marshalling layer. Try to make structs blittable (for example, avoid `bool`). See the "Blittable Types" section above for more details.
*If* the struct is blittable use `sizeof()` instead of `Marshal.SizeOf<MyStruct>()` for better performance. As mentioned above, you can validate that the type is blittable by attempting to create a pinned `GCHandle`. If the type is not a string or considered blittable `GCHandle.Alloc` will throw an `ArgumentException`.
Pointers to structs in definitions must either be passed by `ref` or use `unsafe` and `*`.
Other References
----------------
[MarshalAs Attribute](http://msdn.microsoft.com/en-us/library/system.runtime.interopservices.marshalasattribute.aspx "MSDN")
[GetLastError and managed code](http://blogs.msdn.com/b/adam_nathan/archive/2003/04/25/56643.aspx "MSDN")
[Copying and Pinning](https://msdn.microsoft.com/en-us/library/23acw07k.aspx "MSDN")
[Marshalling between Managed and Unmanaged Code (MSDN Magazine January 2008)](http://download.microsoft.com/download/3/A/7/3A7FA450-1F33-41F7-9E6D-3AA95B5A6AEA/MSDNMagazineJanuary2008en-us.chm) *This is a .chm download*

View File

@@ -1,5 +1,5 @@
##dotnet/CoreFx
###Libraries in NETStandard
## dotnet/CoreFx
### Libraries in NETStandard
- ref
- Default targetgroup should be NETCoreApp build
- P2P references to other reference assembly CSProjs.
@@ -26,7 +26,7 @@
- Use P2P references to pkgproj for things not in NETStandard.Library
- Implementation is automatically injected by targets.
###Libraries above NETStandard
### Libraries above NETStandard
- ref
- Only required if component is inbox somewhere or has multiple implementations for same NETStandard version.
- Build against NETStandard.Library package
@@ -45,7 +45,7 @@
- Use P2P references for things not in NETStandard.Library
- Implementation is automatically injected by targets.
###NETStandard compatibility facade
### NETStandard compatibility facade
Provides compatibility between NETCore.App and libraries built against NETStandard.
- ref
- Should adapt supported NETStandard.dll to contract reference assemblies.
@@ -57,7 +57,7 @@ Provides compatibility between NETCore.App and libraries built against NETStanda
- No individual package builds.
- Should be included in NETCoreApp package as above
###Desktop compatibility facades
### Desktop compatibility facades
- ref
- Should adapt latest desktop surface to contract reference assemblies for anything that has type-overlap with desktop, including assemblies like Microsoft.Win32.Registry which are not in NETStandard.Library.
- EG: `GenFacades -contracts:<desktopReferenceAssemblies> -seeds:<allNetCoreAppReferenceAssemblies>`
@@ -68,20 +68,20 @@ Provides compatibility between NETCore.App and libraries built against NETStanda
- No individual package builds.
- Should be included in NETCoreApp package as above
###Native shims
### Native shims
- pkg
- No individual package builds.
- As with libraries in NETStandard the shims will be included in the runtime specific packages for NETCoreApp
##Transition
## Transition
###End goal
### End goal
- CoreFx does not build any reference assemblies for NETStandard.
- For every library in NETStandard.Library, the only configurations in CoreFx are framework-specific. EG: NETCoreApp1.2, UAP10.1
- For every library in NETCore.App but not in NETStandard.Library there must be a framework-specific configuration for NETCoreApp1.2. Other configurations may exist to ship in a package, but those will not be built by folks building just NETCore.App.
###Getting there (WIP)
### Getting there (WIP)
Folks still consume our current packages so we need to keep building those until we transition.
@@ -93,4 +93,4 @@ Folks still consume our current packages so we need to keep building those until
- As packages are deleted we'll need to opt-in to Microsoft.Private.CoreFx.NETCore.App in some way.
- proposal:
- each CSProj is evaluated for layout path in the context of all of its build configurations.
- We'll determine applicability similar to how we do for pkgprojs to idenitify which config to binplace.
- We'll determine applicability similar to how we do for pkgprojs to identify which config to binplace.

View File

@@ -10,7 +10,7 @@ In either case the file name of the `.pkgproj` is just {assemblyName}.pkgproj an
## Package samples
### Simple portable library
This is the simplest case. The package project need only reference the single project that implements the portable libary.
This is the simplest case. The package project need only reference the single project that implements the portable library.
Sample `System.Text.Encodings.Web.pkgproj`
```
@@ -147,7 +147,7 @@ Tests can be similarly filtered grouping the compilation directives under:
(from `\tests\FunctionalTests\System.Net.Security.Tests.csproj`)
### Platform-specific library
These packages need to provide a different platform specific implementation on each platform. They do this by splitting the implementations into seperate packages and associating those platform specific packages with the primary reference package. Each platform specific package sets `PackageTargetRuntime` to the specific platform RID that it applies.
These packages need to provide a different platform specific implementation on each platform. They do this by splitting the implementations into separate packages and associating those platform specific packages with the primary reference package. Each platform specific package sets `PackageTargetRuntime` to the specific platform RID that it applies.
Sample `System.IO.FileSystem.pkgproj`
```
@@ -222,7 +222,7 @@ Sample `System.IO.FileSystem.pkgproj`
```
## Asset selection
The makeup of a package folder is primarily a grouping of project references to the projects that compose that package. Settings within each referenced project determines where that asset will be placed in the package. For example, reference assembly projects will be placed under the `ref/{targetMoniker}` folder in the package and implementations will be under either `lib/{targetMoniker}` or `runtimes/{rid}/lib/{targetMoniker}`. Whenever NuGet evaulates a package in the context of a referencing project it will choose the best compile time asset (preferring `ref`, then falling back to `lib`) and runtime asset (preffering `runtimes/{rid}/lib` and falling back to `lib`) for every package that is referenced. For more information see http://docs.nuget.org/.
The makeup of a package folder is primarily a grouping of project references to the projects that compose that package. Settings within each referenced project determines where that asset will be placed in the package. For example, reference assembly projects will be placed under the `ref/{targetMoniker}` folder in the package and implementations will be under either `lib/{targetMoniker}` or `runtimes/{rid}/lib/{targetMoniker}`. Whenever NuGet evaluates a package in the context of a referencing project it will choose the best compile time asset (preferring `ref`, then falling back to `lib`) and runtime asset (preferring `runtimes/{rid}/lib` and falling back to `lib`) for every package that is referenced. For more information see http://docs.nuget.org/.
Asset projects (`.csproj`, `.vbproj`, or `.depproj`) can control their `{targetMoniker}` using the `PackageTargetFramework` property in the project file. Similarly `{rid}` is controlled using the `PackageTargetRuntime` property. In the corefx repo we automatically select default values for these properties based on the [Build pivots](#build-pivots). These can be overridden in the project reference using metadata of the same name, but this is rarely needed.
@@ -261,7 +261,7 @@ Part of package build is to ensure that a package is applicable on all platforms
</ProjectReference>
```
2. Through SupportedFramework items with Version metdata.
2. Through SupportedFramework items with Version metadata.
```
<!-- no version indicates latest is supported -->
<SupportedFramework Include="net46;netcore50;netcoreapp1.0" />
@@ -271,7 +271,7 @@ Part of package build is to ensure that a package is applicable on all platforms
</SupportedFramework>
```
###Inbox assets
### Inbox assets
Some libraries are supported inbox on particular frameworks. For these frameworks the package should not present any assets for (ref or lib) for that framework, but instead permit installation and provide no assets. We do this in the package by using placeholders ref and lib folders for that framework. In the package project one can use `InboxOnTargetFramework` items. The following is an example from the System.Linq.Expressions package.
```
<InboxOnTargetFramework Include="net45" />
@@ -292,8 +292,8 @@ If the library is also a "classic" reference assembly, not referenced by default
Package validation will catch a case where we know a library is supported inbox but a package is using an asset from the package. This data is driven by framework lists from previously-shipped targeting packs. The error will appear as: *Framework net45 should support Microsoft.CSharp inbox but {explanation of problem}. You may need to add <InboxOnTargetFramework Include="net45" /> to your project.*
###External assets
Runtime specific packages are used to break apart implementations into seperate packages and enable "pay-for-play". For example: don't download the Windows implementation if we're only building/deploying for linux. In most cases we can completely seperate implementations into seperate packages such that they easily translate. For example:
### External assets
Runtime specific packages are used to break apart implementations into separate packages and enable "pay-for-play". For example: don't download the Windows implementation if we're only building/deploying for linux. In most cases we can completely separate implementations into separate packages such that they easily translate. For example:
```
runtimes/win/lib/dotnet5.4/System.Banana.dll
runtimes/unix/lib/dotnet5.4/System.Banana.dll
@@ -305,7 +305,7 @@ Consider the following:
runtimes/win/lib/dotnet5.4/System.Banana.dll
runtimes/win/lib/net46/System.Banana.dll
```
Suppose we wanted to split the desktop (`net46`) implementation into a seperate package than the portable implementation. Doing so would cause both the `dotnet5.4` asset and the `net46` asset to be applicable and result in a bin-clash. This is because in a single package the `net46` asset is preferred over the `dotnet5.4` asset, but in seperate packages both are in view. The packaging validation will catch this problem and display an error such as
Suppose we wanted to split the desktop (`net46`) implementation into a separate package than the portable implementation. Doing so would cause both the `dotnet5.4` asset and the `net46` asset to be applicable and result in a bin-clash. This is because in a single package the `net46` asset is preferred over the `dotnet5.4` asset, but in separate packages both are in view. The packaging validation will catch this problem and display an error such as
*System.Banana includes both package1/runtimes/win/lib/net46/System.Banana.dll and package2/runtimes/win/lib/dotnet5.4/System.Banana.dll an on net46 which have the same name and will clash when both packages are used.*
@@ -315,7 +315,7 @@ The fix for the error is to put a placeholder in the package that contains the a
<ExternalOnTargetFramework Include="net46" />
```
###Not supported
### Not supported
In rare cases a particular library might represent itself as targeting a specific portable moniker (eg: `dotnet5.4`) but it cannot be supported on a particular target framework that is included in that portable moniker for other reasons. One example of this is System.Diagnostics.Process. The surface area of this API is portable to dotnet5.4 and could technically run in UWP based on its managed dependencies. The native API, however, is not supported in app container. To prevent this package and packages which depend on it from installing in UWP projects, only to fail at runtime, we can block the package from being installed.
To do this we create a placeholder in the lib folder with the following syntax. The resulting combination will be an applicable ref asset with no applicable lib and NuGet's compat check will fail.

View File

@@ -1,8 +1,8 @@
#Build Project Guidelines
# Build Project Guidelines
In order to work in corefx repo you must first run build.cmd/sh from the root of the repo at least
once before you can iterate and work on a given library project.
##Behind the scenes with build.cmd/sh
## Behind the scenes with build.cmd/sh
- Setup tools (currently done in init-tools but will later be a boot-strap script in run.cmd/sh)
- Restore external dependencies
@@ -18,17 +18,17 @@ once before you can iterate and work on a given library project.
- Build src\sign.builds
//**CONSIDER**: We should make this as part of the src.builds file instead of a separate .builds file.
##Behind the scenes with build-test.cmd/sh
## Behind the scenes with build-test.cmd/sh
- build-test.cmd cannot be run successfully until build.cmd has been run at least once for a `BuildConfiguration`.
- Build src\tests.builds which builds all applicable test projects. For test project information see [tests](#tests).
- The build pass will happen twice. Once for the specific `$(BuildConfiguration)` and once for netstandard. That way we run both sets of applicable tests against for the given `$(BuildConfiguration)`.
- TODO: Currently as part of src/post.builds we call CloudBuild.targets which sets up our test runs. This needs to be moved to be part of build-test.cmd now.
##Behind the scenes with build-packages.cmd/sh
## Behind the scenes with build-packages.cmd/sh
- build-packages.cmd cannot be run successfully until build.cmd has been run at least once for a BuildConfiguration.
- Build src\packages.builds which will build only the packages it has the context to build which will generally be only the ones for the given `BuildConfiguration`. If a package requires assets from multiple `BuildConfigurations` it will require that all `BuildConfigurations` are built first.
#Build Pivots
# Build Pivots
Below is a list of all the various options we pivot the project builds on:
- **Target Frameworks:** NetFx (aka Desktop), netstandard (aka dotnet/Portable), NETCoreApp (aka .NET Core), UAP (aka UWP/Store/netcore50)
@@ -37,7 +37,7 @@ Below is a list of all the various options we pivot the project builds on:
- **Flavor:** Debug, Release
- **Architecture:** x86, x64, arm, arm64, AnyCPU
##Individual build properties
## Individual build properties
The following are the properties associated with each build pivot
- `$(TargetGroup) -> netstandard | netcoreapp | netcoreappcorert | netfx | uap | uapaot`
@@ -49,7 +49,7 @@ The following are the properties associated with each build pivot
For more information on various targets see also [.NET Standard](https://github.com/dotnet/standard/blob/master/docs/versions.md)
##Aggregate build properties
## Aggregate build properties
Each project will define a set of supported build configurations
```
@@ -88,12 +88,12 @@ All supported targets with unique windows/unix build for netcoreapp:
<PropertyGroup>
```
##Options for building
## Options for building
A full or individual project build is centered around BuildConfiguration and will be setup in one of the following ways:
1. `$(BuildConfiguration)` can directly be passed to the build.
2. `$(Configuration)` can be passed to the build and `$(BuildConfiguration)` will be set to `$(Configuration)-$(ArchGroup)`. This is a convinence mechanism primarily to help with VS support because VS uses the `Configuration` property for switching between various configurations in the UI. NOTE: this only works well for individual projects and not the root builds.
2. `$(Configuration)` can be passed to the build and `$(BuildConfiguration)` will be set to `$(Configuration)-$(ArchGroup)`. This is a convenience mechanism primarily to help with VS support because VS uses the `Configuration` property for switching between various configurations in the UI. NOTE: this only works well for individual projects and not the root builds.
3. `$(TargetGroup), $(OSGroup), $(ConfigurationGroup), $(ArchGroup)` can individually be passed in to change the default value for just part of the `BuildConfiguration`.
4. If nothing is passed to the build then we will default `BuildConfiguration` from the environment. Example: `netcoreapp-[OSGroup Running On]-Debug-x64`.
@@ -101,20 +101,20 @@ On top of the `BuildConfiguration` we also have `RuntimeOS` which can be passed
Any of the mentioned properties can be set via `/p:<Property>=<Value>` at the command line. When building using our run tool or any of the wrapper scripts around it (i.e. build.cmd) a number of these properties have aliases which make them easier to pass (run build.cmd/sh -? for the aliases).
##Selecting the correct build configuration
## Selecting the correct build configuration
When building an individual project the `BuildConfiguration` will be used to select the closest matching configuration listed in the project's `BuildConfigurations` property. The rules used to select the configuration will consider compatible target frameworks and OS fallbacks.
TODO: Link to the target framework and OS fallbacks when they are available.
Temporary versions are at https://github.com/dotnet/corefx/blob/dev/eng/src/Tools/GenerateProps/osgroups.props and https://github.com/dotnet/corefx/blob/dev/eng/src/Tools/GenerateProps/targetgroups.props
##Supported full build configurations
## Supported full build configurations
- .NET Core latest on current OS (default) -> `netcoreapp-[RunningOS]`
- .NET Core CoreRT -> `netcoreappcorert-[RunningOS]`
- .NET Framework latest -> `netfx-Windows_NT`
- UWP -> `uapaot-Windows_NT`
- UAP F5 -> `uap-Windows_NT`
##Project configurations for VS
## Project configurations for VS
For each unique configuration needed for a given library project a configuration property group should be added to the project so it can be selected and built in VS and also clearly identify the various configurations.<BR/>
`<PropertyGroup Condition="'$(Configuration)|$(Platform)' == '$(OSGroup)-$(TargetGroup)-$(ConfigurationGroup)|$(Platform)'">`
@@ -144,7 +144,17 @@ Project configurations that are unique for a few different target frameworks and
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'uap101-Release|AnyCPU'" />
```
#Library project guidelines
## Updating Configurations
We have a build task that you can run to automatically update all the projects with the above boilerplate as well as updating all the solution files for the libraries. Whenever you change the list of configurations for a project you can regenerate all these for the entire repo by running:
```
msbuild build.proj /t:UpdateVSConfigurations
```
If you want to scope the generation you can either undo changes that you don't need or you can temporarily limit the set of projects or directories by updating the item set in the UpdateVSConfigurations target in https://github.com/dotnet/corefx/blob/master/build.proj
# Library project guidelines
Library projects should use the following directory layout.
```
@@ -154,7 +164,7 @@ src\<Library Name>\pkg - Contains package projects for the library.
src\<Library Name>\tests - Contains the test code for a library
```
##ref
## ref
Reference assemblies are required for any library that has more than one implementation or uses a facade. A reference assembly is a surface-area-only assembly that represents the public API of the library. To generate a reference assembly source file you can use the [GenAPI tool](https://www.nuget.org/packages/Microsoft.DotNet.BuildTools.GenAPI). If a library is a pure portable library with a single implementation it need not use a reference assembly at all.
In the ref directory for the library there should be at most **one** `.csproj` that contains the latest API for the reference assembly for the library. That project can contain multiple entries in its `BuildConfigurations` property.
@@ -163,39 +173,39 @@ There are two types of reference assembly projects:
1. Libraries that contain APIs in netstandard
- `BuildConfigurations` should contain non-netstandard configurations for the platforms they support.
- Should use a relative path `<ProjectReference>` to the dependencies it has. Those depedencies should only be libraries with similar build configurations and be part of netstandard.
- Should use a relative path `<ProjectReference>` to the dependencies it has. Those dependencies should only be libraries with similar build configurations and be part of netstandard.
<BR/>//**CONSIDER**: just using Reference with a custom task to pull from TP or turn to ProjectReference
2. Libraries that are built on top of netstandard
- `BuildConfigurations` should contain only netstandard configurations.
- Should contain `<Reference Include='netstandard'>`
- Anything outside of netstandard should use a relative path `<ProjectReference>` to its dependencies it has. Those depdencies should only be libraries that are built against netstandard as well.
- Anything outside of netstandard should use a relative path `<ProjectReference>` to the dependencies it has. Those dependencies should only be libraries that are built against netstandard as well.
###ref output
### ref output
The output for the ref project build will be a flat targeting pack folder in the following directory:
`bin\ref\$(TargetGroup)`
<BR/>//**CONSIDER**: Do we need a specific BuildConfiguration version of TargetGroup for this output path to ensure all projects output to same targeting path?
##src
## src
In the src directory for a library there should be only **one** `.csproj` file that contains any information necessary to build the library in various configurations. All supported configurations should be listed in the `BuildConfigurations` property.
All libraries should use `<Reference Include="..." />` for all their project references. That will cause them to be resolved against a targeting pack (i.e. `bin\ref\netcoreapp` or `\bin\ref\netstandard`) based on the project configuration. There should not be any direct project references to other libraries. The only exception to that rule right now is for partial facades which directly reference System.Private.CoreLib and thus need to directly reference other partial facades to avoid type conflicts.
<BR>//**CONSIDER**: just using Reference and use a reference to System.Private.CoreLib as a trigger to turn the other References into a ProjectReference automatically. That will allow us to have consistency where all projects just use Reference.
###src output
### src output
The output for the src product build will be a flat runtime folder into the following directory:
`bin\runtime\$(BuildConfiguration)`
Note: The `BuildConfiguration` is the global property and not the project configuration because we need all projects to output to the same runtime directory no matter which compatible configuration we select and build the project with.
##pkg
## pkg
In the pkg directory for the library there should be only **one** `.pkgproj` for the primary package for the library. If the library has platform-specific implementations those should be split into platform specific projects in a subfolder for each platform. (see [Package projects](./package-projects.md))
TODO: Outline changes needed for pkgprojs
##tests
## tests
Similar to the src projects tests projects will define a `BuildConfigurations` property so they can list out the set of build configurations they support.
Tests should not have any `<Reference>` or `<ProjectReference>` items in their project because they will automatically reference everything in the targeting pack based on the configuration they are building in. The only exception to this is a `<ProjectReference>` can be used to reference other test helper libraries or assets.
@@ -203,13 +213,13 @@ Tests should not have any `<Reference>` or `<ProjectReference>` items in their p
In order to build and run a test project in a given configuration a root level build.cmd/sh must have been completed for that configuration first. Tests will run on the live built runtime at `bin\runtime\$(BuildConfiguration)`.
TODO: We need to update our test host so that it can run from the shared runtime directory as well as resolve assemblies from the test output directory.
###tests output
### tests output
All test outputs should be under
`bin\tests\$(MSBuildProjectName)\$(BuildConfiguration)` or
`bin\tests\$(MSBuildProjectName)\netstandard`
##Facades
## Facades
Facades are unique in that they don't have any code and instead are generated by finding a contract reference assembly with the matching identity and generating type forwards for all the types to where they live in the implementation assemblies (aka facade seeds). There are also partial facades which contain some type forwards as well as some code definitions. All the various build configurations should be contained in the one csproj file per library.
TODO: Fill in more information about the required properties for creating a facade project.

Some files were not shown because too many files have changed in this diff Show More